From f9cbe1f8d58d24bd892ebbdbaca84b8fbda69768 Mon Sep 17 00:00:00 2001
From: Cloud SDK Librarian
Date: Wed, 26 Nov 2025 08:46:55 +0000
Subject: [PATCH] feat: generate libraries

---
 .librarian/state.yaml | 8 +- .../services/eventarc/async_client.py | 2 + .../eventarc_v1/services/eventarc/client.py | 2 + .../eventarc_v1/types/google_api_source.py | 68 + .../google/cloud/eventarc_v1/types/trigger.py | 29 + ...eventarc_create_google_api_source_async.py | 1 + ..._eventarc_create_google_api_source_sync.py | 1 + ...eventarc_update_google_api_source_async.py | 1 + ..._eventarc_update_google_api_source_sync.py | 1 + ...pet_metadata_google.cloud.eventarc.v1.json | 56 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 6 + .../google/cloud/netapp_v1/types/volume.py | 47 + .../unit/gapic/netapp_v1/test_net_app.py | 4 + .../dns_threat_detector_service.rst | 10 + .../firewall_activation.rst | 10 + .../network_security_v1alpha1/intercept.rst | 10 + .../network_security_v1alpha1/mirroring.rst | 10 + ...ization_security_profile_group_service.rst | 10 + .../network_security_v1alpha1/services_.rst | 7 + .../sse_gateway_service.rst | 10 + .../sse_realm_service.rst | 10 + .../network_security_v1alpha1/__init__.py | 446 + .../gapic_metadata.json | 2059 +- .../dns_threat_detector_service/__init__.py | 22 + .../async_client.py | 1596 + .../dns_threat_detector_service/client.py | 2056 + .../dns_threat_detector_service/pagers.py | 201 + .../transports/README.rst | 9 + .../transports/__init__.py | 41 + .../transports/base.py | 371 + .../transports/grpc.py | 678 + .../transports/grpc_asyncio.py | 766 + .../transports/rest.py | 2712 ++ .../transports/rest_base.py | 674 + .../services/firewall_activation/__init__.py | 22 + .../firewall_activation/async_client.py | 2332 + .../services/firewall_activation/client.py | 2815 ++ .../services/firewall_activation/pagers.py | 371 + .../firewall_activation/transports/README.rst | 9 + .../transports/__init__.py | 38 + .../firewall_activation/transports/base.py | 442 + .../firewall_activation/transports/grpc.py | 844 + .../transports/grpc_asyncio.py | 960 + .../firewall_activation/transports/rest.py | 3969 ++ .../transports/rest_base.py | 959 + .../services/intercept/__init__.py | 22 + .../services/intercept/async_client.py | 3811 ++ .../services/intercept/client.py | 4324 ++ .../services/intercept/pagers.py | 677 + .../services/intercept/transports/README.rst | 9 + .../services/intercept/transports/__init__.py | 36 + .../services/intercept/transports/base.py | 591 + .../services/intercept/transports/grpc.py | 1180 + .../intercept/transports/grpc_asyncio.py | 1354 + .../services/intercept/transports/rest.py | 6273 +++ .../intercept/transports/rest_base.py | 1447 + .../services/mirroring/__init__.py | 22 + .../services/mirroring/async_client.py | 3792 ++ .../services/mirroring/client.py | 4305 ++ .../services/mirroring/pagers.py | 677 + .../services/mirroring/transports/README.rst | 9 + .../services/mirroring/transports/__init__.py | 36 + .../services/mirroring/transports/base.py | 591 + .../services/mirroring/transports/grpc.py | 1179 + .../mirroring/transports/grpc_asyncio.py | 1353 + .../services/mirroring/transports/rest.py | 6272 +++ .../mirroring/transports/rest_base.py | 1447 + .../services/network_security/async_client.py | 6078 ++- .../services/network_security/client.py | 6208 ++- .../services/network_security/pagers.py | 1398 +- .../network_security/transports/base.py | 633 + .../network_security/transports/grpc.py | 1348 +- .../transports/grpc_asyncio.py | 1583 +-
.../network_security/transports/rest.py | 10480 +++- .../network_security/transports/rest_base.py | 2192 +- .../__init__.py | 22 + .../async_client.py | 2410 + .../client.py | 2901 ++ .../pagers.py | 383 + .../transports/README.rst | 9 + .../transports/__init__.py | 43 + .../transports/base.py | 445 + .../transports/grpc.py | 850 + .../transports/grpc_asyncio.py | 963 + .../transports/rest.py | 3994 ++ .../transports/rest_base.py | 964 + .../services/sse_gateway_service/__init__.py | 22 + .../sse_gateway_service/async_client.py | 1867 + .../services/sse_gateway_service/client.py | 2326 + .../services/sse_gateway_service/pagers.py | 355 + .../sse_gateway_service/transports/README.rst | 9 + .../transports/__init__.py | 36 + .../sse_gateway_service/transports/base.py | 396 + .../sse_gateway_service/transports/grpc.py | 748 + .../transports/grpc_asyncio.py | 851 + .../sse_gateway_service/transports/rest.py | 3262 ++ .../transports/rest_base.py | 762 + .../services/sse_realm_service/__init__.py | 22 + .../sse_realm_service/async_client.py | 2528 + .../services/sse_realm_service/client.py | 2983 ++ .../services/sse_realm_service/pagers.py | 509 + .../sse_realm_service/transports/README.rst | 9 + .../sse_realm_service/transports/__init__.py | 36 + .../sse_realm_service/transports/base.py | 466 + .../sse_realm_service/transports/grpc.py | 858 + .../transports/grpc_asyncio.py | 1001 + .../sse_realm_service/transports/rest.py | 4275 ++ .../sse_realm_service/transports/rest_base.py | 1011 + .../types/__init__.py | 410 + .../types/authorization_policy.py | 430 + .../types/authz_policy.py | 1009 + .../types/backend_authentication_config.py | 345 + .../types/dns_threat_detector.py | 262 + .../types/firewall_activation.py | 791 + .../types/gateway_security_policy.py | 250 + .../types/gateway_security_policy_rule.py | 308 + .../types/intercept.py | 1575 + .../types/mirroring.py | 1603 + .../types/security_profile_group.py | 267 + .../types/security_profile_group_intercept.py | 50 + .../types/security_profile_group_mirroring.py | 50 + .../types/security_profile_group_service.py | 376 + ...security_profile_group_threatprevention.py | 287 + .../security_profile_group_urlfiltering.py | 103 + .../types/server_tls_policy.py | 394 + .../types/sse_gateway.py | 647 + .../types/sse_realm.py | 1073 + .../types/tls_inspection_policy.py | 395 + .../types/url_list.py | 240 + ...ervice_create_dns_threat_detector_async.py | 57 + ...service_create_dns_threat_detector_sync.py | 57 + ...ervice_delete_dns_threat_detector_async.py | 50 + ...service_delete_dns_threat_detector_sync.py | 50 + ...r_service_get_dns_threat_detector_async.py | 53 + ...or_service_get_dns_threat_detector_sync.py | 53 + ...service_list_dns_threat_detectors_async.py | 54 + ..._service_list_dns_threat_detectors_sync.py | 54 + ...ervice_update_dns_threat_detector_async.py | 56 + ...service_update_dns_threat_detector_sync.py | 56 + ...ate_firewall_endpoint_association_async.py | 64 + ...eate_firewall_endpoint_association_sync.py | 64 + ...tivation_create_firewall_endpoint_async.py | 62 + ...ctivation_create_firewall_endpoint_sync.py | 62 + ...ete_firewall_endpoint_association_async.py | 57 + ...lete_firewall_endpoint_association_sync.py | 57 + ...tivation_delete_firewall_endpoint_async.py | 57 + ...ctivation_delete_firewall_endpoint_sync.py | 57 + ...get_firewall_endpoint_association_async.py | 53 + ..._get_firewall_endpoint_association_sync.py | 53 + ..._activation_get_firewall_endpoint_async.py | 53 + ...l_activation_get_firewall_endpoint_sync.py | 53 + 
...st_firewall_endpoint_associations_async.py | 54 + ...ist_firewall_endpoint_associations_sync.py | 54 + ...ctivation_list_firewall_endpoints_async.py | 54 + ...activation_list_firewall_endpoints_sync.py | 54 + ...ate_firewall_endpoint_association_async.py | 63 + ...date_firewall_endpoint_association_sync.py | 63 + ...tivation_update_firewall_endpoint_async.py | 60 + ...ctivation_update_firewall_endpoint_sync.py | 60 + ...rcept_create_intercept_deployment_async.py | 63 + ...create_intercept_deployment_group_async.py | 62 + ..._create_intercept_deployment_group_sync.py | 62 + ...ercept_create_intercept_deployment_sync.py | 63 + ...ercept_endpoint_group_association_async.py | 66 + ...tercept_endpoint_group_association_sync.py | 66 + ...t_create_intercept_endpoint_group_async.py | 64 + ...pt_create_intercept_endpoint_group_sync.py | 64 + ...rcept_delete_intercept_deployment_async.py | 57 + ...delete_intercept_deployment_group_async.py | 57 + ..._delete_intercept_deployment_group_sync.py | 57 + ...ercept_delete_intercept_deployment_sync.py | 57 + ...ercept_endpoint_group_association_async.py | 57 + ...tercept_endpoint_group_association_sync.py | 57 + ...t_delete_intercept_endpoint_group_async.py | 57 + ...pt_delete_intercept_endpoint_group_sync.py | 57 + ...ntercept_get_intercept_deployment_async.py | 53 + ...pt_get_intercept_deployment_group_async.py | 53 + ...ept_get_intercept_deployment_group_sync.py | 53 + ...intercept_get_intercept_deployment_sync.py | 53 + ...ercept_endpoint_group_association_async.py | 53 + ...tercept_endpoint_group_association_sync.py | 53 + ...cept_get_intercept_endpoint_group_async.py | 53 + ...rcept_get_intercept_endpoint_group_sync.py | 53 + ..._list_intercept_deployment_groups_async.py | 54 + ...t_list_intercept_deployment_groups_sync.py | 54 + ...ercept_list_intercept_deployments_async.py | 54 + ...tercept_list_intercept_deployments_sync.py | 54 + ...rcept_endpoint_group_associations_async.py | 54 + ...ercept_endpoint_group_associations_sync.py | 54 + ...pt_list_intercept_endpoint_groups_async.py | 54 + ...ept_list_intercept_endpoint_groups_sync.py | 54 + ...rcept_update_intercept_deployment_async.py | 61 + ...update_intercept_deployment_group_async.py | 60 + ..._update_intercept_deployment_group_sync.py | 60 + ...ercept_update_intercept_deployment_sync.py | 61 + ...ercept_endpoint_group_association_async.py | 65 + ...tercept_endpoint_group_association_sync.py | 65 + ...t_update_intercept_endpoint_group_async.py | 62 + ...pt_update_intercept_endpoint_group_sync.py | 62 + ...oring_create_mirroring_deployment_async.py | 63 + ...create_mirroring_deployment_group_async.py | 62 + ..._create_mirroring_deployment_group_sync.py | 62 + ...roring_create_mirroring_deployment_sync.py | 63 + ...roring_endpoint_group_association_async.py | 57 + ...rroring_endpoint_group_association_sync.py | 57 + ...g_create_mirroring_endpoint_group_async.py | 58 + ...ng_create_mirroring_endpoint_group_sync.py | 58 + ...oring_delete_mirroring_deployment_async.py | 57 + ...delete_mirroring_deployment_group_async.py | 57 + ..._delete_mirroring_deployment_group_sync.py | 57 + ...roring_delete_mirroring_deployment_sync.py | 57 + ...roring_endpoint_group_association_async.py | 57 + ...rroring_endpoint_group_association_sync.py | 57 + ...g_delete_mirroring_endpoint_group_async.py | 57 + ...ng_delete_mirroring_endpoint_group_sync.py | 57 + ...irroring_get_mirroring_deployment_async.py | 53 + ...ng_get_mirroring_deployment_group_async.py | 53 + ...ing_get_mirroring_deployment_group_sync.py | 53 + 
...mirroring_get_mirroring_deployment_sync.py | 53 + ...roring_endpoint_group_association_async.py | 53 + ...rroring_endpoint_group_association_sync.py | 53 + ...ring_get_mirroring_endpoint_group_async.py | 53 + ...oring_get_mirroring_endpoint_group_sync.py | 53 + ..._list_mirroring_deployment_groups_async.py | 54 + ...g_list_mirroring_deployment_groups_sync.py | 54 + ...roring_list_mirroring_deployments_async.py | 54 + ...rroring_list_mirroring_deployments_sync.py | 54 + ...oring_endpoint_group_associations_async.py | 54 + ...roring_endpoint_group_associations_sync.py | 54 + ...ng_list_mirroring_endpoint_groups_async.py | 54 + ...ing_list_mirroring_endpoint_groups_sync.py | 54 + ...oring_update_mirroring_deployment_async.py | 61 + ...update_mirroring_deployment_group_async.py | 60 + ..._update_mirroring_deployment_group_sync.py | 60 + ...roring_update_mirroring_deployment_sync.py | 61 + ...roring_endpoint_group_association_async.py | 55 + ...rroring_endpoint_group_association_sync.py | 55 + ...g_update_mirroring_endpoint_group_async.py | 55 + ...ng_update_mirroring_endpoint_group_sync.py | 55 + ...urity_create_authorization_policy_async.py | 63 + ...curity_create_authorization_policy_sync.py | 63 + ...work_security_create_authz_policy_async.py | 65 + ...twork_security_create_authz_policy_sync.py | 65 + ...ate_backend_authentication_config_async.py | 64 + ...eate_backend_authentication_config_sync.py | 64 + ...ty_create_gateway_security_policy_async.py | 62 + ...eate_gateway_security_policy_rule_async.py | 65 + ...reate_gateway_security_policy_rule_sync.py | 65 + ...ity_create_gateway_security_policy_sync.py | 62 + ...security_create_server_tls_policy_async.py | 62 + ..._security_create_server_tls_policy_sync.py | 62 + ...rity_create_tls_inspection_policy_async.py | 63 + ...urity_create_tls_inspection_policy_sync.py | 63 + ..._network_security_create_url_list_async.py | 63 + ...d_network_security_create_url_list_sync.py | 63 + ...urity_delete_authorization_policy_async.py | 57 + ...curity_delete_authorization_policy_sync.py | 57 + ...work_security_delete_authz_policy_async.py | 57 + ...twork_security_delete_authz_policy_sync.py | 57 + ...ete_backend_authentication_config_async.py | 57 + ...lete_backend_authentication_config_sync.py | 57 + ...ty_delete_gateway_security_policy_async.py | 57 + ...lete_gateway_security_policy_rule_async.py | 57 + ...elete_gateway_security_policy_rule_sync.py | 57 + ...ity_delete_gateway_security_policy_sync.py | 57 + ...security_delete_server_tls_policy_async.py | 57 + ..._security_delete_server_tls_policy_sync.py | 57 + ...rity_delete_tls_inspection_policy_async.py | 57 + ...urity_delete_tls_inspection_policy_sync.py | 57 + ..._network_security_delete_url_list_async.py | 57 + ...d_network_security_delete_url_list_sync.py | 57 + ...security_get_authorization_policy_async.py | 53 + ..._security_get_authorization_policy_sync.py | 53 + ...network_security_get_authz_policy_async.py | 53 + ..._network_security_get_authz_policy_sync.py | 53 + ...get_backend_authentication_config_async.py | 53 + ..._get_backend_authentication_config_sync.py | 53 + ...urity_get_gateway_security_policy_async.py | 53 + ..._get_gateway_security_policy_rule_async.py | 53 + ...y_get_gateway_security_policy_rule_sync.py | 53 + ...curity_get_gateway_security_policy_sync.py | 53 + ...rk_security_get_server_tls_policy_async.py | 53 + ...ork_security_get_server_tls_policy_sync.py | 53 + ...ecurity_get_tls_inspection_policy_async.py | 53 + ...security_get_tls_inspection_policy_sync.py | 53 + 
...ted_network_security_get_url_list_async.py | 53 + ...ated_network_security_get_url_list_sync.py | 53 + ...urity_list_authorization_policies_async.py | 54 + ...curity_list_authorization_policies_sync.py | 54 + ...work_security_list_authz_policies_async.py | 54 + ...twork_security_list_authz_policies_sync.py | 54 + ...st_backend_authentication_configs_async.py | 54 + ...ist_backend_authentication_configs_sync.py | 54 + ...ty_list_gateway_security_policies_async.py | 54 + ...ity_list_gateway_security_policies_sync.py | 54 + ...ist_gateway_security_policy_rules_async.py | 54 + ...list_gateway_security_policy_rules_sync.py | 54 + ...security_list_server_tls_policies_async.py | 54 + ..._security_list_server_tls_policies_sync.py | 54 + ...rity_list_tls_inspection_policies_async.py | 54 + ...urity_list_tls_inspection_policies_sync.py | 54 + ...d_network_security_list_url_lists_async.py | 54 + ...ed_network_security_list_url_lists_sync.py | 54 + ...urity_update_authorization_policy_async.py | 61 + ...curity_update_authorization_policy_sync.py | 61 + ...work_security_update_authz_policy_async.py | 63 + ...twork_security_update_authz_policy_sync.py | 63 + ...ate_backend_authentication_config_async.py | 62 + ...date_backend_authentication_config_sync.py | 62 + ...ty_update_gateway_security_policy_async.py | 60 + ...date_gateway_security_policy_rule_async.py | 64 + ...pdate_gateway_security_policy_rule_sync.py | 64 + ...ity_update_gateway_security_policy_sync.py | 60 + ...security_update_server_tls_policy_async.py | 60 + ..._security_update_server_tls_policy_sync.py | 60 + ...rity_update_tls_inspection_policy_async.py | 61 + ...urity_update_tls_inspection_policy_sync.py | 61 + ..._network_security_update_url_list_async.py | 61 + ...d_network_security_update_url_list_sync.py | 61 + ...p_service_create_security_profile_async.py | 60 + ...ice_create_security_profile_group_async.py | 60 + ...vice_create_security_profile_group_sync.py | 58 + ...up_service_create_security_profile_sync.py | 58 + ...p_service_delete_security_profile_async.py | 59 + ...ice_delete_security_profile_group_async.py | 59 + ...vice_delete_security_profile_group_sync.py | 57 + ...up_service_delete_security_profile_sync.py | 57 + ...roup_service_get_security_profile_async.py | 55 + ...ervice_get_security_profile_group_async.py | 55 + ...service_get_security_profile_group_sync.py | 53 + ...group_service_get_security_profile_sync.py | 53 + ...vice_list_security_profile_groups_async.py | 56 + ...rvice_list_security_profile_groups_sync.py | 54 + ...up_service_list_security_profiles_async.py | 56 + ...oup_service_list_security_profiles_sync.py | 54 + ...p_service_update_security_profile_async.py | 57 + ...ice_update_security_profile_group_async.py | 57 + ...vice_update_security_profile_group_sync.py | 55 + ...up_service_update_security_profile_sync.py | 55 + ...ervice_create_partner_sse_gateway_async.py | 62 + ...service_create_partner_sse_gateway_sync.py | 62 + ...ervice_delete_partner_sse_gateway_async.py | 57 + ...service_delete_partner_sse_gateway_sync.py | 57 + ...y_service_get_partner_sse_gateway_async.py | 53 + ...ay_service_get_partner_sse_gateway_sync.py | 53 + ...service_get_sse_gateway_reference_async.py | 53 + ..._service_get_sse_gateway_reference_sync.py | 53 + ...service_list_partner_sse_gateways_async.py | 54 + ..._service_list_partner_sse_gateways_sync.py | 54 + ...rvice_list_sse_gateway_references_async.py | 54 + ...ervice_list_sse_gateway_references_sync.py | 54 + ...ervice_update_partner_sse_gateway_async.py | 60 + 
...service_update_partner_sse_gateway_sync.py | 60 + ..._service_create_partner_sse_realm_async.py | 62 + ...m_service_create_partner_sse_realm_sync.py | 62 + ...alm_service_create_sac_attachment_async.py | 63 + ...ealm_service_create_sac_attachment_sync.py | 63 + ...se_realm_service_create_sac_realm_async.py | 58 + ...sse_realm_service_create_sac_realm_sync.py | 58 + ..._service_delete_partner_sse_realm_async.py | 57 + ...m_service_delete_partner_sse_realm_sync.py | 57 + ...alm_service_delete_sac_attachment_async.py | 57 + ...ealm_service_delete_sac_attachment_sync.py | 57 + ...se_realm_service_delete_sac_realm_async.py | 57 + ...sse_realm_service_delete_sac_realm_sync.py | 57 + ...alm_service_get_partner_sse_realm_async.py | 53 + ...ealm_service_get_partner_sse_realm_sync.py | 53 + ..._realm_service_get_sac_attachment_async.py | 53 + ...e_realm_service_get_sac_attachment_sync.py | 53 + ...d_sse_realm_service_get_sac_realm_async.py | 53 + ...ed_sse_realm_service_get_sac_realm_sync.py | 53 + ...m_service_list_partner_sse_realms_async.py | 54 + ...lm_service_list_partner_sse_realms_sync.py | 54 + ...ealm_service_list_sac_attachments_async.py | 54 + ...realm_service_list_sac_attachments_sync.py | 54 + ...sse_realm_service_list_sac_realms_async.py | 54 + ..._sse_realm_service_list_sac_realms_sync.py | 54 + ...google.cloud.networksecurity.v1alpha1.json | 20902 +++++++- ...ixup_network_security_v1alpha1_keywords.py | 124 + .../test_dns_threat_detector_service.py | 8213 ++++ .../test_firewall_activation.py | 12735 +++++ .../test_intercept.py | 21193 +++++++++ .../test_mirroring.py | 21206 +++++++++ .../test_network_security.py | 39476 +++++++++++++++- ...nization_security_profile_group_service.py | 12991 +++++ .../test_sse_gateway_service.py | 10189 ++++ .../test_sse_realm_service.py | 14087 ++++++ 387 files changed, 313630 insertions(+), 3841 deletions(-) create mode 100644 packages/google-cloud-network-security/docs/network_security_v1alpha1/dns_threat_detector_service.rst create mode 100644 packages/google-cloud-network-security/docs/network_security_v1alpha1/firewall_activation.rst create mode 100644 packages/google-cloud-network-security/docs/network_security_v1alpha1/intercept.rst create mode 100644 packages/google-cloud-network-security/docs/network_security_v1alpha1/mirroring.rst create mode 100644 packages/google-cloud-network-security/docs/network_security_v1alpha1/organization_security_profile_group_service.rst create mode 100644 packages/google-cloud-network-security/docs/network_security_v1alpha1/sse_gateway_service.rst create mode 100644 packages/google-cloud-network-security/docs/network_security_v1alpha1/sse_realm_service.rst create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/async_client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/pagers.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/README.rst create mode 100644 
packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/grpc.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/rest.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/rest_base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/async_client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/pagers.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/README.rst create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/grpc.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/rest.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/rest_base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/async_client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/pagers.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/README.rst create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/base.py create mode 100644 
packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/grpc.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/rest.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/rest_base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/async_client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/pagers.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/README.rst create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/grpc.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/rest.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/rest_base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/async_client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/pagers.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/README.rst create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/grpc.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/grpc_asyncio.py create mode 100644 
packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/rest.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/rest_base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/async_client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/pagers.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/README.rst create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/grpc.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/rest.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/rest_base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/async_client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/pagers.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/README.rst create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/grpc.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/rest.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/rest_base.py create mode 100644 
packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/authorization_policy.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/authz_policy.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/backend_authentication_config.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/dns_threat_detector.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/firewall_activation.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/gateway_security_policy.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/gateway_security_policy_rule.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/intercept.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/mirroring.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_intercept.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_mirroring.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_service.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_threatprevention.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_urlfiltering.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/server_tls_policy.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/sse_gateway.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/sse_realm.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/tls_inspection_policy.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/url_list.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_sync.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_sync.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_sync.py 
create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_async.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_sync.py create 
mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authorization_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authorization_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authz_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authz_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_url_list_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_url_list_sync.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authz_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authz_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_url_list_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_url_list_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authorization_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authorization_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authz_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authz_policy_sync.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_url_list_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_url_list_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authorization_policies_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authorization_policies_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authz_policies_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authz_policies_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_sync.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_url_lists_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_url_lists_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authorization_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authorization_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authz_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authz_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_sync.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_url_list_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_url_list_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_sync.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_async.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_sync.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_sync.py create mode 100644 packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_dns_threat_detector_service.py create mode 100644 packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_firewall_activation.py create mode 100644 packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_intercept.py create mode 100644 packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_mirroring.py create mode 100644 packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_organization_security_profile_group_service.py create mode 100644 packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_sse_gateway_service.py create mode 100644 packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_sse_realm_service.py diff --git a/.librarian/state.yaml b/.librarian/state.yaml index fc72a1f57277..1bcd77bfc4f5 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -1757,7 +1757,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-eventarc version: 1.17.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: c9ff4f1cd26f1fe63e6d1c11a198366b70ebdb84 apis: - path: google/cloud/eventarc/v1 service_config: eventarc_v1.yaml @@ -2426,7 +2426,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-netapp version: 0.4.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: c9ff4f1cd26f1fe63e6d1c11a198366b70ebdb84 apis: - path: google/cloud/netapp/v1 service_config: netapp_v1.yaml @@ -2479,7 +2479,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-network-security version: 0.9.21 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: c9ff4f1cd26f1fe63e6d1c11a198366b70ebdb84 apis: - path: google/cloud/networksecurity/v1alpha1 service_config: networksecurity_v1alpha1.yaml @@ -4244,7 +4244,7 @@ libraries: tag_format: '{id}-v{version}' - id: googleapis-common-protos version: 1.72.0 - last_generated_commit: a17b84add8318f780fcc8a027815d5fee644b9f7 + last_generated_commit: c9ff4f1cd26f1fe63e6d1c11a198366b70ebdb84 apis: - path: google/api service_config: serviceconfig.yaml diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 9f7208d448b1..bb0cb6bc0829 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -5120,6 +5120,7 @@ async def sample_create_google_api_source(): # Initialize request argument(s) google_api_source = eventarc_v1.GoogleApiSource() + 
google_api_source.organization_subscription.enabled = True google_api_source.destination = "destination_value" request = eventarc_v1.CreateGoogleApiSourceRequest( @@ -5272,6 +5273,7 @@ async def sample_update_google_api_source(): # Initialize request argument(s) google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.organization_subscription.enabled = True google_api_source.destination = "destination_value" request = eventarc_v1.UpdateGoogleApiSourceRequest( diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index b1e787007700..5c0ea70c4fd2 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -5726,6 +5726,7 @@ def sample_create_google_api_source(): # Initialize request argument(s) google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.organization_subscription.enabled = True google_api_source.destination = "destination_value" request = eventarc_v1.CreateGoogleApiSourceRequest( @@ -5875,6 +5876,7 @@ def sample_update_google_api_source(): # Initialize request argument(s) google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.organization_subscription.enabled = True google_api_source.destination = "destination_value" request = eventarc_v1.UpdateGoogleApiSourceRequest( diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/types/google_api_source.py b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/types/google_api_source.py index 4401caee8d7e..7a618cdeacca 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/types/google_api_source.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/types/google_api_source.py @@ -34,6 +34,13 @@ class GoogleApiSource(proto.Message): r"""A GoogleApiSource represents a subscription of 1P events from a MessageBus. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Identifier. Resource name of the form @@ -73,8 +80,57 @@ class GoogleApiSource(proto.Message): logging_config (google.cloud.eventarc_v1.types.LoggingConfig): Optional. Config to control Platform logging for the GoogleApiSource. + organization_subscription (google.cloud.eventarc_v1.types.GoogleApiSource.OrganizationSubscription): + Optional. Config to enable subscribing to + events from all projects in the + GoogleApiSource's org. + + This field is a member of `oneof`_ ``wide_scope_subscription``. + project_subscriptions (google.cloud.eventarc_v1.types.GoogleApiSource.ProjectSubscriptions): + Optional. Config to enable subscribing to all + events from a list of projects. + + All the projects must be in the same org as the + GoogleApiSource. + + This field is a member of `oneof`_ ``wide_scope_subscription``. """ + class ProjectSubscriptions(proto.Message): + r"""Config to enable subscribing to all events from a list of + projects. + + Attributes: + list_ (MutableSequence[str]): + Required. A list of projects to receive + events from. + All the projects must be in the same org. 
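A minimal sketch of how the new ``wide_scope_subscription`` oneof can be populated from client code; the destination value and project identifiers below are placeholders, and because the two fields are members of the same oneof, assigning one clears the other, so only one branch should be kept in practice.

from google.cloud import eventarc_v1

google_api_source = eventarc_v1.GoogleApiSource()
google_api_source.destination = "destination_value"

# Org-wide branch: subscribe to 1P events from every project in the
# GoogleApiSource's org.
google_api_source.organization_subscription.enabled = True

# Project-list branch (mutually exclusive with the org-wide branch):
# subscribe to events from specific projects in the same org. Entries use
# the project/{identifier} format, where the identifier is a project ID or
# project number. Assigning this member clears organization_subscription.
google_api_source.project_subscriptions.list_ = [
    "project/example-project",  # placeholder project ID
    "project/123456789012",     # placeholder project number
]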
The + listed projects should have the format + project/{identifier} where identifier can be + either the project id for project number. A + single list may contain both formats. At most + 100 projects can be listed. + """ + + list_: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class OrganizationSubscription(proto.Message): + r"""Config to enabled subscribing to events from other projects + in the org. + + Attributes: + enabled (bool): + Required. Enable org level subscription. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -124,6 +180,18 @@ class GoogleApiSource(proto.Message): number=11, message=gce_logging_config.LoggingConfig, ) + organization_subscription: OrganizationSubscription = proto.Field( + proto.MESSAGE, + number=12, + oneof="wide_scope_subscription", + message=OrganizationSubscription, + ) + project_subscriptions: ProjectSubscriptions = proto.Field( + proto.MESSAGE, + number=13, + oneof="wide_scope_subscription", + message=ProjectSubscriptions, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/types/trigger.py index 324b3bbb23f8..8e213354fe91 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -102,6 +102,13 @@ class Trigger(proto.Message): Output only. Whether or not this Trigger satisfies the requirements of physical zone separation + retry_policy (google.cloud.eventarc_v1.types.Trigger.RetryPolicy): + Optional. The retry policy to use in the + Trigger. + If unset, event delivery will be retried for up + to 24 hours by default: + + https://cloud.google.com/eventarc/docs/retry-events etag (str): Output only. This checksum is computed by the server based on the value of other fields, and @@ -110,6 +117,23 @@ class Trigger(proto.Message): proceeding. """ + class RetryPolicy(proto.Message): + r"""The retry policy configuration for the Trigger. + + Can only be set with Cloud Run destinations. + + Attributes: + max_attempts (int): + Optional. The maximum number of delivery + attempts for any message. The only valid value + is 1. 
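As a hedged illustration of the ``retry_policy`` field described above (settable only for Cloud Run destinations, with 1 as the only valid ``max_attempts`` value), the retry-policy portion of a Trigger could be set as follows; the trigger name is a placeholder and the rest of the Trigger configuration is omitted.

from google.cloud import eventarc_v1

trigger = eventarc_v1.Trigger()
trigger.name = "projects/my-project/locations/us-central1/triggers/my-trigger"  # placeholder
trigger.retry_policy.max_attempts = 1  # currently the only valid value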
+ """ + + max_attempts: int = proto.Field( + proto.INT32, + number=1, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -170,6 +194,11 @@ class Trigger(proto.Message): proto.BOOL, number=19, ) + retry_policy: RetryPolicy = proto.Field( + proto.MESSAGE, + number=20, + message=RetryPolicy, + ) etag: str = proto.Field( proto.STRING, number=99, diff --git a/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_async.py b/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_async.py index 373fae16034c..8c7d6ee27d0c 100644 --- a/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_async.py +++ b/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_async.py @@ -40,6 +40,7 @@ async def sample_create_google_api_source(): # Initialize request argument(s) google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.organization_subscription.enabled = True google_api_source.destination = "destination_value" request = eventarc_v1.CreateGoogleApiSourceRequest( diff --git a/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_sync.py b/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_sync.py index 17f3010ef528..e75e206c94fd 100644 --- a/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_sync.py +++ b/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_google_api_source_sync.py @@ -40,6 +40,7 @@ def sample_create_google_api_source(): # Initialize request argument(s) google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.organization_subscription.enabled = True google_api_source.destination = "destination_value" request = eventarc_v1.CreateGoogleApiSourceRequest( diff --git a/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_async.py b/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_async.py index adcb46ae801d..ab6c75b826ab 100644 --- a/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_async.py +++ b/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_async.py @@ -40,6 +40,7 @@ async def sample_update_google_api_source(): # Initialize request argument(s) google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.organization_subscription.enabled = True google_api_source.destination = "destination_value" request = eventarc_v1.UpdateGoogleApiSourceRequest( diff --git a/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_sync.py b/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_sync.py index 26c502c1c3c2..8477eeaa9701 100644 --- a/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_sync.py +++ b/packages/google-cloud-eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_api_source_sync.py @@ -40,6 +40,7 @@ def sample_update_google_api_source(): # 
Initialize request argument(s) google_api_source = eventarc_v1.GoogleApiSource() + google_api_source.organization_subscription.enabled = True google_api_source.destination = "destination_value" request = eventarc_v1.UpdateGoogleApiSourceRequest( diff --git a/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json b/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json index 4dcc4db3c3f9..ea61b8d9363f 100644 --- a/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json +++ b/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json @@ -599,12 +599,12 @@ "regionTag": "eventarc_v1_generated_Eventarc_CreateGoogleApiSource_async", "segments": [ { - "end": 60, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 61, "start": 27, "type": "SHORT" }, @@ -614,18 +614,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -687,12 +687,12 @@ "regionTag": "eventarc_v1_generated_Eventarc_CreateGoogleApiSource_sync", "segments": [ { - "end": 60, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 61, "start": 27, "type": "SHORT" }, @@ -702,18 +702,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -5706,12 +5706,12 @@ "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleApiSource_async", "segments": [ { - "end": 58, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 59, "start": 27, "type": "SHORT" }, @@ -5721,18 +5721,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 49, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], @@ -5790,12 +5790,12 @@ "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleApiSource_sync", "segments": [ { - "end": 58, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 59, "start": 27, "type": "SHORT" }, @@ -5805,18 +5805,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 49, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 490e6fa628c4..1ca3b699757e 100644 --- a/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -26853,6 +26853,7 @@ def test_create_trigger_rest_call_success(request_type): "conditions": {}, "event_data_content_type": "event_data_content_type_value", "satisfies_pzs": True, + "retry_policy": {"max_attempts": 1303}, "etag": "etag_value", } # The version of a 
generated dependency at test runtime may differ from the version used during generation. @@ -27082,6 +27083,7 @@ def test_update_trigger_rest_call_success(request_type): "conditions": {}, "event_data_content_type": "event_data_content_type_value", "satisfies_pzs": True, + "retry_policy": {"max_attempts": 1303}, "etag": "etag_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -32206,6 +32208,8 @@ def test_create_google_api_source_rest_call_success(request_type): "destination": "destination_value", "crypto_key_name": "crypto_key_name_value", "logging_config": {"log_severity": 1}, + "organization_subscription": {"enabled": True}, + "project_subscriptions": {"list_": ["list__value1", "list__value2"]}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -32416,6 +32420,8 @@ def test_update_google_api_source_rest_call_success(request_type): "destination": "destination_value", "crypto_key_name": "crypto_key_name_value", "logging_config": {"log_severity": 1}, + "organization_subscription": {"enabled": True}, + "project_subscriptions": {"list_": ["list__value1", "list__value2"]}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py index e92f5ea2eb76..1c5b070bad5b 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py @@ -793,8 +793,44 @@ class SimpleExportPolicyRule(proto.Message): ignored if this is enabled. This field is a member of `oneof`_ ``_kerberos_5p_read_write``. + squash_mode (google.cloud.netapp_v1.types.SimpleExportPolicyRule.SquashMode): + Optional. Defines how user identity squashing is applied for + this export rule. This field is the preferred way to + configure squashing behavior and takes precedence over + ``has_root_access`` if both are provided. + + This field is a member of `oneof`_ ``_squash_mode``. + anon_uid (int): + Optional. An integer representing the anonymous user ID. + Range is 0 to 4294967295. Required when squash_mode is + ROOT_SQUASH or ALL_SQUASH. + + This field is a member of `oneof`_ ``_anon_uid``. """ + class SquashMode(proto.Enum): + r"""SquashMode defines how remote user privileges are restricted + when accessing an NFS export. It controls how user identities + (like root) are mapped to anonymous users to limit access and + enforce security. + + Values: + SQUASH_MODE_UNSPECIFIED (0): + Defaults to NO_ROOT_SQUASH. + NO_ROOT_SQUASH (1): + The root user (UID 0) retains full access. + Other users are unaffected. + ROOT_SQUASH (2): + The root user (UID 0) is squashed to + anonymous user ID. Other users are unaffected. + ALL_SQUASH (3): + All users are squashed to anonymous user ID. 
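A short sketch of the new squash settings on an NFS export rule, assuming the generated types are exported at the ``netapp_v1`` package level as usual; ``anon_uid`` is required whenever ``squash_mode`` is ROOT_SQUASH or ALL_SQUASH, and the client CIDR below is a placeholder.

from google.cloud import netapp_v1

rule = netapp_v1.SimpleExportPolicyRule()
rule.allowed_clients = "10.0.0.0/24"  # placeholder client CIDR
# Map root (UID 0) to the anonymous user; other users keep their identity.
rule.squash_mode = netapp_v1.SimpleExportPolicyRule.SquashMode.ROOT_SQUASH
rule.anon_uid = 65534  # anonymous UID used for squashed users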
+ """ + SQUASH_MODE_UNSPECIFIED = 0 + NO_ROOT_SQUASH = 1 + ROOT_SQUASH = 2 + ALL_SQUASH = 3 + allowed_clients: str = proto.Field( proto.STRING, number=1, @@ -851,6 +887,17 @@ class SimpleExportPolicyRule(proto.Message): number=11, optional=True, ) + squash_mode: SquashMode = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum=SquashMode, + ) + anon_uid: int = proto.Field( + proto.INT64, + number=13, + optional=True, + ) class SnapshotPolicy(proto.Message): diff --git a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py index 2d866c15f7be..e3d2cace2d36 100644 --- a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py +++ b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py @@ -39488,6 +39488,8 @@ def test_create_volume_rest_call_success(request_type): "kerberos_5i_read_write": True, "kerberos_5p_read_only": True, "kerberos_5p_read_write": True, + "squash_mode": 1, + "anon_uid": 845, } ] }, @@ -39792,6 +39794,8 @@ def test_update_volume_rest_call_success(request_type): "kerberos_5i_read_write": True, "kerberos_5p_read_only": True, "kerberos_5p_read_write": True, + "squash_mode": 1, + "anon_uid": 845, } ] }, diff --git a/packages/google-cloud-network-security/docs/network_security_v1alpha1/dns_threat_detector_service.rst b/packages/google-cloud-network-security/docs/network_security_v1alpha1/dns_threat_detector_service.rst new file mode 100644 index 000000000000..938331d3e6b4 --- /dev/null +++ b/packages/google-cloud-network-security/docs/network_security_v1alpha1/dns_threat_detector_service.rst @@ -0,0 +1,10 @@ +DnsThreatDetectorService +------------------------------------------ + +.. automodule:: google.cloud.network_security_v1alpha1.services.dns_threat_detector_service + :members: + :inherited-members: + +.. automodule:: google.cloud.network_security_v1alpha1.services.dns_threat_detector_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-network-security/docs/network_security_v1alpha1/firewall_activation.rst b/packages/google-cloud-network-security/docs/network_security_v1alpha1/firewall_activation.rst new file mode 100644 index 000000000000..fd143c5fde70 --- /dev/null +++ b/packages/google-cloud-network-security/docs/network_security_v1alpha1/firewall_activation.rst @@ -0,0 +1,10 @@ +FirewallActivation +------------------------------------ + +.. automodule:: google.cloud.network_security_v1alpha1.services.firewall_activation + :members: + :inherited-members: + +.. automodule:: google.cloud.network_security_v1alpha1.services.firewall_activation.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-network-security/docs/network_security_v1alpha1/intercept.rst b/packages/google-cloud-network-security/docs/network_security_v1alpha1/intercept.rst new file mode 100644 index 000000000000..5277544e370a --- /dev/null +++ b/packages/google-cloud-network-security/docs/network_security_v1alpha1/intercept.rst @@ -0,0 +1,10 @@ +Intercept +--------------------------- + +.. automodule:: google.cloud.network_security_v1alpha1.services.intercept + :members: + :inherited-members: + +.. 
automodule:: google.cloud.network_security_v1alpha1.services.intercept.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-network-security/docs/network_security_v1alpha1/mirroring.rst b/packages/google-cloud-network-security/docs/network_security_v1alpha1/mirroring.rst new file mode 100644 index 000000000000..dd9b88d2b3dc --- /dev/null +++ b/packages/google-cloud-network-security/docs/network_security_v1alpha1/mirroring.rst @@ -0,0 +1,10 @@ +Mirroring +--------------------------- + +.. automodule:: google.cloud.network_security_v1alpha1.services.mirroring + :members: + :inherited-members: + +.. automodule:: google.cloud.network_security_v1alpha1.services.mirroring.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-network-security/docs/network_security_v1alpha1/organization_security_profile_group_service.rst b/packages/google-cloud-network-security/docs/network_security_v1alpha1/organization_security_profile_group_service.rst new file mode 100644 index 000000000000..d07404b75c38 --- /dev/null +++ b/packages/google-cloud-network-security/docs/network_security_v1alpha1/organization_security_profile_group_service.rst @@ -0,0 +1,10 @@ +OrganizationSecurityProfileGroupService +--------------------------------------------------------- + +.. automodule:: google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service + :members: + :inherited-members: + +.. automodule:: google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-network-security/docs/network_security_v1alpha1/services_.rst b/packages/google-cloud-network-security/docs/network_security_v1alpha1/services_.rst index c516f66fcc1b..6dcb60b8db9f 100644 --- a/packages/google-cloud-network-security/docs/network_security_v1alpha1/services_.rst +++ b/packages/google-cloud-network-security/docs/network_security_v1alpha1/services_.rst @@ -3,4 +3,11 @@ Services for Google Cloud Network Security v1alpha1 API .. toctree:: :maxdepth: 2 + dns_threat_detector_service + firewall_activation + intercept + mirroring network_security + organization_security_profile_group_service + sse_gateway_service + sse_realm_service diff --git a/packages/google-cloud-network-security/docs/network_security_v1alpha1/sse_gateway_service.rst b/packages/google-cloud-network-security/docs/network_security_v1alpha1/sse_gateway_service.rst new file mode 100644 index 000000000000..ec54f994b3e7 --- /dev/null +++ b/packages/google-cloud-network-security/docs/network_security_v1alpha1/sse_gateway_service.rst @@ -0,0 +1,10 @@ +SSEGatewayService +----------------------------------- + +.. automodule:: google.cloud.network_security_v1alpha1.services.sse_gateway_service + :members: + :inherited-members: + +.. automodule:: google.cloud.network_security_v1alpha1.services.sse_gateway_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-network-security/docs/network_security_v1alpha1/sse_realm_service.rst b/packages/google-cloud-network-security/docs/network_security_v1alpha1/sse_realm_service.rst new file mode 100644 index 000000000000..2eef5febd121 --- /dev/null +++ b/packages/google-cloud-network-security/docs/network_security_v1alpha1/sse_realm_service.rst @@ -0,0 +1,10 @@ +SSERealmService +--------------------------------- + +.. automodule:: google.cloud.network_security_v1alpha1.services.sse_realm_service + :members: + :inherited-members: + +.. 
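Each of the new v1alpha1 services documented above is exposed through a generated client; as a hedged sketch (the parent resource format and the ``name`` field access are assumptions based on standard GAPIC conventions rather than the service definition), listing DNS threat detectors might look like this.

from google.cloud import network_security_v1alpha1

client = network_security_v1alpha1.DnsThreatDetectorServiceClient()
request = network_security_v1alpha1.ListDnsThreatDetectorsRequest(
    parent="projects/my-project/locations/global",  # assumed parent format
)
# The list method returns a pager that lazily iterates over all results.
for detector in client.list_dns_threat_detectors(request=request):
    print(detector.name)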
automodule:: google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/__init__.py index a28d05da9ace..7e3bb8547657 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/__init__.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/__init__.py @@ -18,7 +18,56 @@ __version__ = package_version.__version__ +from .services.dns_threat_detector_service import ( + DnsThreatDetectorServiceAsyncClient, + DnsThreatDetectorServiceClient, +) +from .services.firewall_activation import ( + FirewallActivationAsyncClient, + FirewallActivationClient, +) +from .services.intercept import InterceptAsyncClient, InterceptClient +from .services.mirroring import MirroringAsyncClient, MirroringClient from .services.network_security import NetworkSecurityAsyncClient, NetworkSecurityClient +from .services.organization_security_profile_group_service import ( + OrganizationSecurityProfileGroupServiceAsyncClient, + OrganizationSecurityProfileGroupServiceClient, +) +from .services.sse_gateway_service import ( + SSEGatewayServiceAsyncClient, + SSEGatewayServiceClient, +) +from .services.sse_realm_service import ( + SSERealmServiceAsyncClient, + SSERealmServiceClient, +) +from .types.authorization_policy import ( + AuthorizationPolicy, + CreateAuthorizationPolicyRequest, + DeleteAuthorizationPolicyRequest, + GetAuthorizationPolicyRequest, + ListAuthorizationPoliciesRequest, + ListAuthorizationPoliciesResponse, + UpdateAuthorizationPolicyRequest, +) +from .types.authz_policy import ( + AuthzPolicy, + CreateAuthzPolicyRequest, + DeleteAuthzPolicyRequest, + GetAuthzPolicyRequest, + ListAuthzPoliciesRequest, + ListAuthzPoliciesResponse, + UpdateAuthzPolicyRequest, +) +from .types.backend_authentication_config import ( + BackendAuthenticationConfig, + CreateBackendAuthenticationConfigRequest, + DeleteBackendAuthenticationConfigRequest, + GetBackendAuthenticationConfigRequest, + ListBackendAuthenticationConfigsRequest, + ListBackendAuthenticationConfigsResponse, + UpdateBackendAuthenticationConfigRequest, +) from .types.client_tls_policy import ( ClientTlsPolicy, CreateClientTlsPolicyRequest, @@ -29,26 +78,423 @@ UpdateClientTlsPolicyRequest, ) from .types.common import OperationMetadata +from .types.dns_threat_detector import ( + CreateDnsThreatDetectorRequest, + DeleteDnsThreatDetectorRequest, + DnsThreatDetector, + GetDnsThreatDetectorRequest, + ListDnsThreatDetectorsRequest, + ListDnsThreatDetectorsResponse, + UpdateDnsThreatDetectorRequest, +) +from .types.firewall_activation import ( + CreateFirewallEndpointAssociationRequest, + CreateFirewallEndpointRequest, + DeleteFirewallEndpointAssociationRequest, + DeleteFirewallEndpointRequest, + FirewallEndpoint, + FirewallEndpointAssociation, + GetFirewallEndpointAssociationRequest, + GetFirewallEndpointRequest, + ListFirewallEndpointAssociationsRequest, + ListFirewallEndpointAssociationsResponse, + ListFirewallEndpointsRequest, + ListFirewallEndpointsResponse, + UpdateFirewallEndpointAssociationRequest, + UpdateFirewallEndpointRequest, +) +from .types.gateway_security_policy import ( + CreateGatewaySecurityPolicyRequest, + DeleteGatewaySecurityPolicyRequest, + GatewaySecurityPolicy, + GetGatewaySecurityPolicyRequest, + ListGatewaySecurityPoliciesRequest, + 
ListGatewaySecurityPoliciesResponse, + UpdateGatewaySecurityPolicyRequest, +) +from .types.gateway_security_policy_rule import ( + CreateGatewaySecurityPolicyRuleRequest, + DeleteGatewaySecurityPolicyRuleRequest, + GatewaySecurityPolicyRule, + GetGatewaySecurityPolicyRuleRequest, + ListGatewaySecurityPolicyRulesRequest, + ListGatewaySecurityPolicyRulesResponse, + UpdateGatewaySecurityPolicyRuleRequest, +) +from .types.intercept import ( + CreateInterceptDeploymentGroupRequest, + CreateInterceptDeploymentRequest, + CreateInterceptEndpointGroupAssociationRequest, + CreateInterceptEndpointGroupRequest, + DeleteInterceptDeploymentGroupRequest, + DeleteInterceptDeploymentRequest, + DeleteInterceptEndpointGroupAssociationRequest, + DeleteInterceptEndpointGroupRequest, + GetInterceptDeploymentGroupRequest, + GetInterceptDeploymentRequest, + GetInterceptEndpointGroupAssociationRequest, + GetInterceptEndpointGroupRequest, + InterceptDeployment, + InterceptDeploymentGroup, + InterceptEndpointGroup, + InterceptEndpointGroupAssociation, + InterceptLocation, + ListInterceptDeploymentGroupsRequest, + ListInterceptDeploymentGroupsResponse, + ListInterceptDeploymentsRequest, + ListInterceptDeploymentsResponse, + ListInterceptEndpointGroupAssociationsRequest, + ListInterceptEndpointGroupAssociationsResponse, + ListInterceptEndpointGroupsRequest, + ListInterceptEndpointGroupsResponse, + UpdateInterceptDeploymentGroupRequest, + UpdateInterceptDeploymentRequest, + UpdateInterceptEndpointGroupAssociationRequest, + UpdateInterceptEndpointGroupRequest, +) +from .types.mirroring import ( + CreateMirroringDeploymentGroupRequest, + CreateMirroringDeploymentRequest, + CreateMirroringEndpointGroupAssociationRequest, + CreateMirroringEndpointGroupRequest, + DeleteMirroringDeploymentGroupRequest, + DeleteMirroringDeploymentRequest, + DeleteMirroringEndpointGroupAssociationRequest, + DeleteMirroringEndpointGroupRequest, + GetMirroringDeploymentGroupRequest, + GetMirroringDeploymentRequest, + GetMirroringEndpointGroupAssociationRequest, + GetMirroringEndpointGroupRequest, + ListMirroringDeploymentGroupsRequest, + ListMirroringDeploymentGroupsResponse, + ListMirroringDeploymentsRequest, + ListMirroringDeploymentsResponse, + ListMirroringEndpointGroupAssociationsRequest, + ListMirroringEndpointGroupAssociationsResponse, + ListMirroringEndpointGroupsRequest, + ListMirroringEndpointGroupsResponse, + MirroringDeployment, + MirroringDeploymentGroup, + MirroringEndpointGroup, + MirroringEndpointGroupAssociation, + MirroringLocation, + UpdateMirroringDeploymentGroupRequest, + UpdateMirroringDeploymentRequest, + UpdateMirroringEndpointGroupAssociationRequest, + UpdateMirroringEndpointGroupRequest, +) +from .types.security_profile_group import SecurityProfile, SecurityProfileGroup +from .types.security_profile_group_intercept import CustomInterceptProfile +from .types.security_profile_group_mirroring import CustomMirroringProfile +from .types.security_profile_group_service import ( + CreateSecurityProfileGroupRequest, + CreateSecurityProfileRequest, + DeleteSecurityProfileGroupRequest, + DeleteSecurityProfileRequest, + GetSecurityProfileGroupRequest, + GetSecurityProfileRequest, + ListSecurityProfileGroupsRequest, + ListSecurityProfileGroupsResponse, + ListSecurityProfilesRequest, + ListSecurityProfilesResponse, + UpdateSecurityProfileGroupRequest, + UpdateSecurityProfileRequest, +) +from .types.security_profile_group_threatprevention import ( + AntivirusOverride, + Protocol, + Severity, + SeverityOverride, + ThreatAction, + 
ThreatOverride, + ThreatPreventionProfile, + ThreatType, +) +from .types.security_profile_group_urlfiltering import UrlFilter, UrlFilteringProfile +from .types.server_tls_policy import ( + CreateServerTlsPolicyRequest, + DeleteServerTlsPolicyRequest, + GetServerTlsPolicyRequest, + ListServerTlsPoliciesRequest, + ListServerTlsPoliciesResponse, + ServerTlsPolicy, + UpdateServerTlsPolicyRequest, +) +from .types.sse_gateway import ( + CreatePartnerSSEGatewayRequest, + DeletePartnerSSEGatewayRequest, + GetPartnerSSEGatewayRequest, + GetSSEGatewayReferenceRequest, + ListPartnerSSEGatewaysRequest, + ListPartnerSSEGatewaysResponse, + ListSSEGatewayReferencesRequest, + ListSSEGatewayReferencesResponse, + PartnerSSEGateway, + SSEGatewayReference, + UpdatePartnerSSEGatewayRequest, +) +from .types.sse_realm import ( + CreatePartnerSSERealmRequest, + CreateSACAttachmentRequest, + CreateSACRealmRequest, + DeletePartnerSSERealmRequest, + DeleteSACAttachmentRequest, + DeleteSACRealmRequest, + GetPartnerSSERealmRequest, + GetSACAttachmentRequest, + GetSACRealmRequest, + ListPartnerSSERealmsRequest, + ListPartnerSSERealmsResponse, + ListSACAttachmentsRequest, + ListSACAttachmentsResponse, + ListSACRealmsRequest, + ListSACRealmsResponse, + PartnerSSERealm, + SACAttachment, + SACRealm, +) from .types.tls import ( CertificateProvider, CertificateProviderInstance, GrpcEndpoint, ValidationCA, ) +from .types.tls_inspection_policy import ( + CreateTlsInspectionPolicyRequest, + DeleteTlsInspectionPolicyRequest, + GetTlsInspectionPolicyRequest, + ListTlsInspectionPoliciesRequest, + ListTlsInspectionPoliciesResponse, + TlsInspectionPolicy, + UpdateTlsInspectionPolicyRequest, +) +from .types.url_list import ( + CreateUrlListRequest, + DeleteUrlListRequest, + GetUrlListRequest, + ListUrlListsRequest, + ListUrlListsResponse, + UpdateUrlListRequest, + UrlList, +) __all__ = ( + "DnsThreatDetectorServiceAsyncClient", + "FirewallActivationAsyncClient", + "InterceptAsyncClient", + "MirroringAsyncClient", "NetworkSecurityAsyncClient", + "OrganizationSecurityProfileGroupServiceAsyncClient", + "SSEGatewayServiceAsyncClient", + "SSERealmServiceAsyncClient", + "AntivirusOverride", + "AuthorizationPolicy", + "AuthzPolicy", + "BackendAuthenticationConfig", "CertificateProvider", "CertificateProviderInstance", "ClientTlsPolicy", + "CreateAuthorizationPolicyRequest", + "CreateAuthzPolicyRequest", + "CreateBackendAuthenticationConfigRequest", "CreateClientTlsPolicyRequest", + "CreateDnsThreatDetectorRequest", + "CreateFirewallEndpointAssociationRequest", + "CreateFirewallEndpointRequest", + "CreateGatewaySecurityPolicyRequest", + "CreateGatewaySecurityPolicyRuleRequest", + "CreateInterceptDeploymentGroupRequest", + "CreateInterceptDeploymentRequest", + "CreateInterceptEndpointGroupAssociationRequest", + "CreateInterceptEndpointGroupRequest", + "CreateMirroringDeploymentGroupRequest", + "CreateMirroringDeploymentRequest", + "CreateMirroringEndpointGroupAssociationRequest", + "CreateMirroringEndpointGroupRequest", + "CreatePartnerSSEGatewayRequest", + "CreatePartnerSSERealmRequest", + "CreateSACAttachmentRequest", + "CreateSACRealmRequest", + "CreateSecurityProfileGroupRequest", + "CreateSecurityProfileRequest", + "CreateServerTlsPolicyRequest", + "CreateTlsInspectionPolicyRequest", + "CreateUrlListRequest", + "CustomInterceptProfile", + "CustomMirroringProfile", + "DeleteAuthorizationPolicyRequest", + "DeleteAuthzPolicyRequest", + "DeleteBackendAuthenticationConfigRequest", "DeleteClientTlsPolicyRequest", + 
"DeleteDnsThreatDetectorRequest", + "DeleteFirewallEndpointAssociationRequest", + "DeleteFirewallEndpointRequest", + "DeleteGatewaySecurityPolicyRequest", + "DeleteGatewaySecurityPolicyRuleRequest", + "DeleteInterceptDeploymentGroupRequest", + "DeleteInterceptDeploymentRequest", + "DeleteInterceptEndpointGroupAssociationRequest", + "DeleteInterceptEndpointGroupRequest", + "DeleteMirroringDeploymentGroupRequest", + "DeleteMirroringDeploymentRequest", + "DeleteMirroringEndpointGroupAssociationRequest", + "DeleteMirroringEndpointGroupRequest", + "DeletePartnerSSEGatewayRequest", + "DeletePartnerSSERealmRequest", + "DeleteSACAttachmentRequest", + "DeleteSACRealmRequest", + "DeleteSecurityProfileGroupRequest", + "DeleteSecurityProfileRequest", + "DeleteServerTlsPolicyRequest", + "DeleteTlsInspectionPolicyRequest", + "DeleteUrlListRequest", + "DnsThreatDetector", + "DnsThreatDetectorServiceClient", + "FirewallActivationClient", + "FirewallEndpoint", + "FirewallEndpointAssociation", + "GatewaySecurityPolicy", + "GatewaySecurityPolicyRule", + "GetAuthorizationPolicyRequest", + "GetAuthzPolicyRequest", + "GetBackendAuthenticationConfigRequest", "GetClientTlsPolicyRequest", + "GetDnsThreatDetectorRequest", + "GetFirewallEndpointAssociationRequest", + "GetFirewallEndpointRequest", + "GetGatewaySecurityPolicyRequest", + "GetGatewaySecurityPolicyRuleRequest", + "GetInterceptDeploymentGroupRequest", + "GetInterceptDeploymentRequest", + "GetInterceptEndpointGroupAssociationRequest", + "GetInterceptEndpointGroupRequest", + "GetMirroringDeploymentGroupRequest", + "GetMirroringDeploymentRequest", + "GetMirroringEndpointGroupAssociationRequest", + "GetMirroringEndpointGroupRequest", + "GetPartnerSSEGatewayRequest", + "GetPartnerSSERealmRequest", + "GetSACAttachmentRequest", + "GetSACRealmRequest", + "GetSSEGatewayReferenceRequest", + "GetSecurityProfileGroupRequest", + "GetSecurityProfileRequest", + "GetServerTlsPolicyRequest", + "GetTlsInspectionPolicyRequest", + "GetUrlListRequest", "GrpcEndpoint", + "InterceptClient", + "InterceptDeployment", + "InterceptDeploymentGroup", + "InterceptEndpointGroup", + "InterceptEndpointGroupAssociation", + "InterceptLocation", + "ListAuthorizationPoliciesRequest", + "ListAuthorizationPoliciesResponse", + "ListAuthzPoliciesRequest", + "ListAuthzPoliciesResponse", + "ListBackendAuthenticationConfigsRequest", + "ListBackendAuthenticationConfigsResponse", "ListClientTlsPoliciesRequest", "ListClientTlsPoliciesResponse", + "ListDnsThreatDetectorsRequest", + "ListDnsThreatDetectorsResponse", + "ListFirewallEndpointAssociationsRequest", + "ListFirewallEndpointAssociationsResponse", + "ListFirewallEndpointsRequest", + "ListFirewallEndpointsResponse", + "ListGatewaySecurityPoliciesRequest", + "ListGatewaySecurityPoliciesResponse", + "ListGatewaySecurityPolicyRulesRequest", + "ListGatewaySecurityPolicyRulesResponse", + "ListInterceptDeploymentGroupsRequest", + "ListInterceptDeploymentGroupsResponse", + "ListInterceptDeploymentsRequest", + "ListInterceptDeploymentsResponse", + "ListInterceptEndpointGroupAssociationsRequest", + "ListInterceptEndpointGroupAssociationsResponse", + "ListInterceptEndpointGroupsRequest", + "ListInterceptEndpointGroupsResponse", + "ListMirroringDeploymentGroupsRequest", + "ListMirroringDeploymentGroupsResponse", + "ListMirroringDeploymentsRequest", + "ListMirroringDeploymentsResponse", + "ListMirroringEndpointGroupAssociationsRequest", + "ListMirroringEndpointGroupAssociationsResponse", + "ListMirroringEndpointGroupsRequest", + 
"ListMirroringEndpointGroupsResponse", + "ListPartnerSSEGatewaysRequest", + "ListPartnerSSEGatewaysResponse", + "ListPartnerSSERealmsRequest", + "ListPartnerSSERealmsResponse", + "ListSACAttachmentsRequest", + "ListSACAttachmentsResponse", + "ListSACRealmsRequest", + "ListSACRealmsResponse", + "ListSSEGatewayReferencesRequest", + "ListSSEGatewayReferencesResponse", + "ListSecurityProfileGroupsRequest", + "ListSecurityProfileGroupsResponse", + "ListSecurityProfilesRequest", + "ListSecurityProfilesResponse", + "ListServerTlsPoliciesRequest", + "ListServerTlsPoliciesResponse", + "ListTlsInspectionPoliciesRequest", + "ListTlsInspectionPoliciesResponse", + "ListUrlListsRequest", + "ListUrlListsResponse", + "MirroringClient", + "MirroringDeployment", + "MirroringDeploymentGroup", + "MirroringEndpointGroup", + "MirroringEndpointGroupAssociation", + "MirroringLocation", "NetworkSecurityClient", "OperationMetadata", + "OrganizationSecurityProfileGroupServiceClient", + "PartnerSSEGateway", + "PartnerSSERealm", + "Protocol", + "SACAttachment", + "SACRealm", + "SSEGatewayReference", + "SSEGatewayServiceClient", + "SSERealmServiceClient", + "SecurityProfile", + "SecurityProfileGroup", + "ServerTlsPolicy", + "Severity", + "SeverityOverride", + "ThreatAction", + "ThreatOverride", + "ThreatPreventionProfile", + "ThreatType", + "TlsInspectionPolicy", + "UpdateAuthorizationPolicyRequest", + "UpdateAuthzPolicyRequest", + "UpdateBackendAuthenticationConfigRequest", "UpdateClientTlsPolicyRequest", + "UpdateDnsThreatDetectorRequest", + "UpdateFirewallEndpointAssociationRequest", + "UpdateFirewallEndpointRequest", + "UpdateGatewaySecurityPolicyRequest", + "UpdateGatewaySecurityPolicyRuleRequest", + "UpdateInterceptDeploymentGroupRequest", + "UpdateInterceptDeploymentRequest", + "UpdateInterceptEndpointGroupAssociationRequest", + "UpdateInterceptEndpointGroupRequest", + "UpdateMirroringDeploymentGroupRequest", + "UpdateMirroringDeploymentRequest", + "UpdateMirroringEndpointGroupAssociationRequest", + "UpdateMirroringEndpointGroupRequest", + "UpdatePartnerSSEGatewayRequest", + "UpdateSecurityProfileGroupRequest", + "UpdateSecurityProfileRequest", + "UpdateServerTlsPolicyRequest", + "UpdateTlsInspectionPolicyRequest", + "UpdateUrlListRequest", + "UrlFilter", + "UrlFilteringProfile", + "UrlList", "ValidationCA", ) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/gapic_metadata.json b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/gapic_metadata.json index 2dcb3d449f64..94552e68ad85 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/gapic_metadata.json +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/gapic_metadata.json @@ -5,94 +5,2087 @@ "protoPackage": "google.cloud.networksecurity.v1alpha1", "schema": "1.0", "services": { + "DnsThreatDetectorService": { + "clients": { + "grpc": { + "libraryClient": "DnsThreatDetectorServiceClient", + "rpcs": { + "CreateDnsThreatDetector": { + "methods": [ + "create_dns_threat_detector" + ] + }, + "DeleteDnsThreatDetector": { + "methods": [ + "delete_dns_threat_detector" + ] + }, + "GetDnsThreatDetector": { + "methods": [ + "get_dns_threat_detector" + ] + }, + "ListDnsThreatDetectors": { + "methods": [ + "list_dns_threat_detectors" + ] + }, + "UpdateDnsThreatDetector": { + "methods": [ + "update_dns_threat_detector" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DnsThreatDetectorServiceAsyncClient", + "rpcs": { + 
"CreateDnsThreatDetector": { + "methods": [ + "create_dns_threat_detector" + ] + }, + "DeleteDnsThreatDetector": { + "methods": [ + "delete_dns_threat_detector" + ] + }, + "GetDnsThreatDetector": { + "methods": [ + "get_dns_threat_detector" + ] + }, + "ListDnsThreatDetectors": { + "methods": [ + "list_dns_threat_detectors" + ] + }, + "UpdateDnsThreatDetector": { + "methods": [ + "update_dns_threat_detector" + ] + } + } + }, + "rest": { + "libraryClient": "DnsThreatDetectorServiceClient", + "rpcs": { + "CreateDnsThreatDetector": { + "methods": [ + "create_dns_threat_detector" + ] + }, + "DeleteDnsThreatDetector": { + "methods": [ + "delete_dns_threat_detector" + ] + }, + "GetDnsThreatDetector": { + "methods": [ + "get_dns_threat_detector" + ] + }, + "ListDnsThreatDetectors": { + "methods": [ + "list_dns_threat_detectors" + ] + }, + "UpdateDnsThreatDetector": { + "methods": [ + "update_dns_threat_detector" + ] + } + } + } + } + }, + "FirewallActivation": { + "clients": { + "grpc": { + "libraryClient": "FirewallActivationClient", + "rpcs": { + "CreateFirewallEndpoint": { + "methods": [ + "create_firewall_endpoint" + ] + }, + "CreateFirewallEndpointAssociation": { + "methods": [ + "create_firewall_endpoint_association" + ] + }, + "DeleteFirewallEndpoint": { + "methods": [ + "delete_firewall_endpoint" + ] + }, + "DeleteFirewallEndpointAssociation": { + "methods": [ + "delete_firewall_endpoint_association" + ] + }, + "GetFirewallEndpoint": { + "methods": [ + "get_firewall_endpoint" + ] + }, + "GetFirewallEndpointAssociation": { + "methods": [ + "get_firewall_endpoint_association" + ] + }, + "ListFirewallEndpointAssociations": { + "methods": [ + "list_firewall_endpoint_associations" + ] + }, + "ListFirewallEndpoints": { + "methods": [ + "list_firewall_endpoints" + ] + }, + "UpdateFirewallEndpoint": { + "methods": [ + "update_firewall_endpoint" + ] + }, + "UpdateFirewallEndpointAssociation": { + "methods": [ + "update_firewall_endpoint_association" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FirewallActivationAsyncClient", + "rpcs": { + "CreateFirewallEndpoint": { + "methods": [ + "create_firewall_endpoint" + ] + }, + "CreateFirewallEndpointAssociation": { + "methods": [ + "create_firewall_endpoint_association" + ] + }, + "DeleteFirewallEndpoint": { + "methods": [ + "delete_firewall_endpoint" + ] + }, + "DeleteFirewallEndpointAssociation": { + "methods": [ + "delete_firewall_endpoint_association" + ] + }, + "GetFirewallEndpoint": { + "methods": [ + "get_firewall_endpoint" + ] + }, + "GetFirewallEndpointAssociation": { + "methods": [ + "get_firewall_endpoint_association" + ] + }, + "ListFirewallEndpointAssociations": { + "methods": [ + "list_firewall_endpoint_associations" + ] + }, + "ListFirewallEndpoints": { + "methods": [ + "list_firewall_endpoints" + ] + }, + "UpdateFirewallEndpoint": { + "methods": [ + "update_firewall_endpoint" + ] + }, + "UpdateFirewallEndpointAssociation": { + "methods": [ + "update_firewall_endpoint_association" + ] + } + } + }, + "rest": { + "libraryClient": "FirewallActivationClient", + "rpcs": { + "CreateFirewallEndpoint": { + "methods": [ + "create_firewall_endpoint" + ] + }, + "CreateFirewallEndpointAssociation": { + "methods": [ + "create_firewall_endpoint_association" + ] + }, + "DeleteFirewallEndpoint": { + "methods": [ + "delete_firewall_endpoint" + ] + }, + "DeleteFirewallEndpointAssociation": { + "methods": [ + "delete_firewall_endpoint_association" + ] + }, + "GetFirewallEndpoint": { + "methods": [ + "get_firewall_endpoint" + ] + }, + 
"GetFirewallEndpointAssociation": { + "methods": [ + "get_firewall_endpoint_association" + ] + }, + "ListFirewallEndpointAssociations": { + "methods": [ + "list_firewall_endpoint_associations" + ] + }, + "ListFirewallEndpoints": { + "methods": [ + "list_firewall_endpoints" + ] + }, + "UpdateFirewallEndpoint": { + "methods": [ + "update_firewall_endpoint" + ] + }, + "UpdateFirewallEndpointAssociation": { + "methods": [ + "update_firewall_endpoint_association" + ] + } + } + } + } + }, + "Intercept": { + "clients": { + "grpc": { + "libraryClient": "InterceptClient", + "rpcs": { + "CreateInterceptDeployment": { + "methods": [ + "create_intercept_deployment" + ] + }, + "CreateInterceptDeploymentGroup": { + "methods": [ + "create_intercept_deployment_group" + ] + }, + "CreateInterceptEndpointGroup": { + "methods": [ + "create_intercept_endpoint_group" + ] + }, + "CreateInterceptEndpointGroupAssociation": { + "methods": [ + "create_intercept_endpoint_group_association" + ] + }, + "DeleteInterceptDeployment": { + "methods": [ + "delete_intercept_deployment" + ] + }, + "DeleteInterceptDeploymentGroup": { + "methods": [ + "delete_intercept_deployment_group" + ] + }, + "DeleteInterceptEndpointGroup": { + "methods": [ + "delete_intercept_endpoint_group" + ] + }, + "DeleteInterceptEndpointGroupAssociation": { + "methods": [ + "delete_intercept_endpoint_group_association" + ] + }, + "GetInterceptDeployment": { + "methods": [ + "get_intercept_deployment" + ] + }, + "GetInterceptDeploymentGroup": { + "methods": [ + "get_intercept_deployment_group" + ] + }, + "GetInterceptEndpointGroup": { + "methods": [ + "get_intercept_endpoint_group" + ] + }, + "GetInterceptEndpointGroupAssociation": { + "methods": [ + "get_intercept_endpoint_group_association" + ] + }, + "ListInterceptDeploymentGroups": { + "methods": [ + "list_intercept_deployment_groups" + ] + }, + "ListInterceptDeployments": { + "methods": [ + "list_intercept_deployments" + ] + }, + "ListInterceptEndpointGroupAssociations": { + "methods": [ + "list_intercept_endpoint_group_associations" + ] + }, + "ListInterceptEndpointGroups": { + "methods": [ + "list_intercept_endpoint_groups" + ] + }, + "UpdateInterceptDeployment": { + "methods": [ + "update_intercept_deployment" + ] + }, + "UpdateInterceptDeploymentGroup": { + "methods": [ + "update_intercept_deployment_group" + ] + }, + "UpdateInterceptEndpointGroup": { + "methods": [ + "update_intercept_endpoint_group" + ] + }, + "UpdateInterceptEndpointGroupAssociation": { + "methods": [ + "update_intercept_endpoint_group_association" + ] + } + } + }, + "grpc-async": { + "libraryClient": "InterceptAsyncClient", + "rpcs": { + "CreateInterceptDeployment": { + "methods": [ + "create_intercept_deployment" + ] + }, + "CreateInterceptDeploymentGroup": { + "methods": [ + "create_intercept_deployment_group" + ] + }, + "CreateInterceptEndpointGroup": { + "methods": [ + "create_intercept_endpoint_group" + ] + }, + "CreateInterceptEndpointGroupAssociation": { + "methods": [ + "create_intercept_endpoint_group_association" + ] + }, + "DeleteInterceptDeployment": { + "methods": [ + "delete_intercept_deployment" + ] + }, + "DeleteInterceptDeploymentGroup": { + "methods": [ + "delete_intercept_deployment_group" + ] + }, + "DeleteInterceptEndpointGroup": { + "methods": [ + "delete_intercept_endpoint_group" + ] + }, + "DeleteInterceptEndpointGroupAssociation": { + "methods": [ + "delete_intercept_endpoint_group_association" + ] + }, + "GetInterceptDeployment": { + "methods": [ + "get_intercept_deployment" + ] + }, + 
"GetInterceptDeploymentGroup": { + "methods": [ + "get_intercept_deployment_group" + ] + }, + "GetInterceptEndpointGroup": { + "methods": [ + "get_intercept_endpoint_group" + ] + }, + "GetInterceptEndpointGroupAssociation": { + "methods": [ + "get_intercept_endpoint_group_association" + ] + }, + "ListInterceptDeploymentGroups": { + "methods": [ + "list_intercept_deployment_groups" + ] + }, + "ListInterceptDeployments": { + "methods": [ + "list_intercept_deployments" + ] + }, + "ListInterceptEndpointGroupAssociations": { + "methods": [ + "list_intercept_endpoint_group_associations" + ] + }, + "ListInterceptEndpointGroups": { + "methods": [ + "list_intercept_endpoint_groups" + ] + }, + "UpdateInterceptDeployment": { + "methods": [ + "update_intercept_deployment" + ] + }, + "UpdateInterceptDeploymentGroup": { + "methods": [ + "update_intercept_deployment_group" + ] + }, + "UpdateInterceptEndpointGroup": { + "methods": [ + "update_intercept_endpoint_group" + ] + }, + "UpdateInterceptEndpointGroupAssociation": { + "methods": [ + "update_intercept_endpoint_group_association" + ] + } + } + }, + "rest": { + "libraryClient": "InterceptClient", + "rpcs": { + "CreateInterceptDeployment": { + "methods": [ + "create_intercept_deployment" + ] + }, + "CreateInterceptDeploymentGroup": { + "methods": [ + "create_intercept_deployment_group" + ] + }, + "CreateInterceptEndpointGroup": { + "methods": [ + "create_intercept_endpoint_group" + ] + }, + "CreateInterceptEndpointGroupAssociation": { + "methods": [ + "create_intercept_endpoint_group_association" + ] + }, + "DeleteInterceptDeployment": { + "methods": [ + "delete_intercept_deployment" + ] + }, + "DeleteInterceptDeploymentGroup": { + "methods": [ + "delete_intercept_deployment_group" + ] + }, + "DeleteInterceptEndpointGroup": { + "methods": [ + "delete_intercept_endpoint_group" + ] + }, + "DeleteInterceptEndpointGroupAssociation": { + "methods": [ + "delete_intercept_endpoint_group_association" + ] + }, + "GetInterceptDeployment": { + "methods": [ + "get_intercept_deployment" + ] + }, + "GetInterceptDeploymentGroup": { + "methods": [ + "get_intercept_deployment_group" + ] + }, + "GetInterceptEndpointGroup": { + "methods": [ + "get_intercept_endpoint_group" + ] + }, + "GetInterceptEndpointGroupAssociation": { + "methods": [ + "get_intercept_endpoint_group_association" + ] + }, + "ListInterceptDeploymentGroups": { + "methods": [ + "list_intercept_deployment_groups" + ] + }, + "ListInterceptDeployments": { + "methods": [ + "list_intercept_deployments" + ] + }, + "ListInterceptEndpointGroupAssociations": { + "methods": [ + "list_intercept_endpoint_group_associations" + ] + }, + "ListInterceptEndpointGroups": { + "methods": [ + "list_intercept_endpoint_groups" + ] + }, + "UpdateInterceptDeployment": { + "methods": [ + "update_intercept_deployment" + ] + }, + "UpdateInterceptDeploymentGroup": { + "methods": [ + "update_intercept_deployment_group" + ] + }, + "UpdateInterceptEndpointGroup": { + "methods": [ + "update_intercept_endpoint_group" + ] + }, + "UpdateInterceptEndpointGroupAssociation": { + "methods": [ + "update_intercept_endpoint_group_association" + ] + } + } + } + } + }, + "Mirroring": { + "clients": { + "grpc": { + "libraryClient": "MirroringClient", + "rpcs": { + "CreateMirroringDeployment": { + "methods": [ + "create_mirroring_deployment" + ] + }, + "CreateMirroringDeploymentGroup": { + "methods": [ + "create_mirroring_deployment_group" + ] + }, + "CreateMirroringEndpointGroup": { + "methods": [ + "create_mirroring_endpoint_group" + ] + }, + 
"CreateMirroringEndpointGroupAssociation": { + "methods": [ + "create_mirroring_endpoint_group_association" + ] + }, + "DeleteMirroringDeployment": { + "methods": [ + "delete_mirroring_deployment" + ] + }, + "DeleteMirroringDeploymentGroup": { + "methods": [ + "delete_mirroring_deployment_group" + ] + }, + "DeleteMirroringEndpointGroup": { + "methods": [ + "delete_mirroring_endpoint_group" + ] + }, + "DeleteMirroringEndpointGroupAssociation": { + "methods": [ + "delete_mirroring_endpoint_group_association" + ] + }, + "GetMirroringDeployment": { + "methods": [ + "get_mirroring_deployment" + ] + }, + "GetMirroringDeploymentGroup": { + "methods": [ + "get_mirroring_deployment_group" + ] + }, + "GetMirroringEndpointGroup": { + "methods": [ + "get_mirroring_endpoint_group" + ] + }, + "GetMirroringEndpointGroupAssociation": { + "methods": [ + "get_mirroring_endpoint_group_association" + ] + }, + "ListMirroringDeploymentGroups": { + "methods": [ + "list_mirroring_deployment_groups" + ] + }, + "ListMirroringDeployments": { + "methods": [ + "list_mirroring_deployments" + ] + }, + "ListMirroringEndpointGroupAssociations": { + "methods": [ + "list_mirroring_endpoint_group_associations" + ] + }, + "ListMirroringEndpointGroups": { + "methods": [ + "list_mirroring_endpoint_groups" + ] + }, + "UpdateMirroringDeployment": { + "methods": [ + "update_mirroring_deployment" + ] + }, + "UpdateMirroringDeploymentGroup": { + "methods": [ + "update_mirroring_deployment_group" + ] + }, + "UpdateMirroringEndpointGroup": { + "methods": [ + "update_mirroring_endpoint_group" + ] + }, + "UpdateMirroringEndpointGroupAssociation": { + "methods": [ + "update_mirroring_endpoint_group_association" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MirroringAsyncClient", + "rpcs": { + "CreateMirroringDeployment": { + "methods": [ + "create_mirroring_deployment" + ] + }, + "CreateMirroringDeploymentGroup": { + "methods": [ + "create_mirroring_deployment_group" + ] + }, + "CreateMirroringEndpointGroup": { + "methods": [ + "create_mirroring_endpoint_group" + ] + }, + "CreateMirroringEndpointGroupAssociation": { + "methods": [ + "create_mirroring_endpoint_group_association" + ] + }, + "DeleteMirroringDeployment": { + "methods": [ + "delete_mirroring_deployment" + ] + }, + "DeleteMirroringDeploymentGroup": { + "methods": [ + "delete_mirroring_deployment_group" + ] + }, + "DeleteMirroringEndpointGroup": { + "methods": [ + "delete_mirroring_endpoint_group" + ] + }, + "DeleteMirroringEndpointGroupAssociation": { + "methods": [ + "delete_mirroring_endpoint_group_association" + ] + }, + "GetMirroringDeployment": { + "methods": [ + "get_mirroring_deployment" + ] + }, + "GetMirroringDeploymentGroup": { + "methods": [ + "get_mirroring_deployment_group" + ] + }, + "GetMirroringEndpointGroup": { + "methods": [ + "get_mirroring_endpoint_group" + ] + }, + "GetMirroringEndpointGroupAssociation": { + "methods": [ + "get_mirroring_endpoint_group_association" + ] + }, + "ListMirroringDeploymentGroups": { + "methods": [ + "list_mirroring_deployment_groups" + ] + }, + "ListMirroringDeployments": { + "methods": [ + "list_mirroring_deployments" + ] + }, + "ListMirroringEndpointGroupAssociations": { + "methods": [ + "list_mirroring_endpoint_group_associations" + ] + }, + "ListMirroringEndpointGroups": { + "methods": [ + "list_mirroring_endpoint_groups" + ] + }, + "UpdateMirroringDeployment": { + "methods": [ + "update_mirroring_deployment" + ] + }, + "UpdateMirroringDeploymentGroup": { + "methods": [ + "update_mirroring_deployment_group" + ] + }, 
+ "UpdateMirroringEndpointGroup": { + "methods": [ + "update_mirroring_endpoint_group" + ] + }, + "UpdateMirroringEndpointGroupAssociation": { + "methods": [ + "update_mirroring_endpoint_group_association" + ] + } + } + }, + "rest": { + "libraryClient": "MirroringClient", + "rpcs": { + "CreateMirroringDeployment": { + "methods": [ + "create_mirroring_deployment" + ] + }, + "CreateMirroringDeploymentGroup": { + "methods": [ + "create_mirroring_deployment_group" + ] + }, + "CreateMirroringEndpointGroup": { + "methods": [ + "create_mirroring_endpoint_group" + ] + }, + "CreateMirroringEndpointGroupAssociation": { + "methods": [ + "create_mirroring_endpoint_group_association" + ] + }, + "DeleteMirroringDeployment": { + "methods": [ + "delete_mirroring_deployment" + ] + }, + "DeleteMirroringDeploymentGroup": { + "methods": [ + "delete_mirroring_deployment_group" + ] + }, + "DeleteMirroringEndpointGroup": { + "methods": [ + "delete_mirroring_endpoint_group" + ] + }, + "DeleteMirroringEndpointGroupAssociation": { + "methods": [ + "delete_mirroring_endpoint_group_association" + ] + }, + "GetMirroringDeployment": { + "methods": [ + "get_mirroring_deployment" + ] + }, + "GetMirroringDeploymentGroup": { + "methods": [ + "get_mirroring_deployment_group" + ] + }, + "GetMirroringEndpointGroup": { + "methods": [ + "get_mirroring_endpoint_group" + ] + }, + "GetMirroringEndpointGroupAssociation": { + "methods": [ + "get_mirroring_endpoint_group_association" + ] + }, + "ListMirroringDeploymentGroups": { + "methods": [ + "list_mirroring_deployment_groups" + ] + }, + "ListMirroringDeployments": { + "methods": [ + "list_mirroring_deployments" + ] + }, + "ListMirroringEndpointGroupAssociations": { + "methods": [ + "list_mirroring_endpoint_group_associations" + ] + }, + "ListMirroringEndpointGroups": { + "methods": [ + "list_mirroring_endpoint_groups" + ] + }, + "UpdateMirroringDeployment": { + "methods": [ + "update_mirroring_deployment" + ] + }, + "UpdateMirroringDeploymentGroup": { + "methods": [ + "update_mirroring_deployment_group" + ] + }, + "UpdateMirroringEndpointGroup": { + "methods": [ + "update_mirroring_endpoint_group" + ] + }, + "UpdateMirroringEndpointGroupAssociation": { + "methods": [ + "update_mirroring_endpoint_group_association" + ] + } + } + } + } + }, "NetworkSecurity": { "clients": { "grpc": { - "libraryClient": "NetworkSecurityClient", + "libraryClient": "NetworkSecurityClient", + "rpcs": { + "CreateAuthorizationPolicy": { + "methods": [ + "create_authorization_policy" + ] + }, + "CreateAuthzPolicy": { + "methods": [ + "create_authz_policy" + ] + }, + "CreateBackendAuthenticationConfig": { + "methods": [ + "create_backend_authentication_config" + ] + }, + "CreateClientTlsPolicy": { + "methods": [ + "create_client_tls_policy" + ] + }, + "CreateGatewaySecurityPolicy": { + "methods": [ + "create_gateway_security_policy" + ] + }, + "CreateGatewaySecurityPolicyRule": { + "methods": [ + "create_gateway_security_policy_rule" + ] + }, + "CreateServerTlsPolicy": { + "methods": [ + "create_server_tls_policy" + ] + }, + "CreateTlsInspectionPolicy": { + "methods": [ + "create_tls_inspection_policy" + ] + }, + "CreateUrlList": { + "methods": [ + "create_url_list" + ] + }, + "DeleteAuthorizationPolicy": { + "methods": [ + "delete_authorization_policy" + ] + }, + "DeleteAuthzPolicy": { + "methods": [ + "delete_authz_policy" + ] + }, + "DeleteBackendAuthenticationConfig": { + "methods": [ + "delete_backend_authentication_config" + ] + }, + "DeleteClientTlsPolicy": { + "methods": [ + 
"delete_client_tls_policy" + ] + }, + "DeleteGatewaySecurityPolicy": { + "methods": [ + "delete_gateway_security_policy" + ] + }, + "DeleteGatewaySecurityPolicyRule": { + "methods": [ + "delete_gateway_security_policy_rule" + ] + }, + "DeleteServerTlsPolicy": { + "methods": [ + "delete_server_tls_policy" + ] + }, + "DeleteTlsInspectionPolicy": { + "methods": [ + "delete_tls_inspection_policy" + ] + }, + "DeleteUrlList": { + "methods": [ + "delete_url_list" + ] + }, + "GetAuthorizationPolicy": { + "methods": [ + "get_authorization_policy" + ] + }, + "GetAuthzPolicy": { + "methods": [ + "get_authz_policy" + ] + }, + "GetBackendAuthenticationConfig": { + "methods": [ + "get_backend_authentication_config" + ] + }, + "GetClientTlsPolicy": { + "methods": [ + "get_client_tls_policy" + ] + }, + "GetGatewaySecurityPolicy": { + "methods": [ + "get_gateway_security_policy" + ] + }, + "GetGatewaySecurityPolicyRule": { + "methods": [ + "get_gateway_security_policy_rule" + ] + }, + "GetServerTlsPolicy": { + "methods": [ + "get_server_tls_policy" + ] + }, + "GetTlsInspectionPolicy": { + "methods": [ + "get_tls_inspection_policy" + ] + }, + "GetUrlList": { + "methods": [ + "get_url_list" + ] + }, + "ListAuthorizationPolicies": { + "methods": [ + "list_authorization_policies" + ] + }, + "ListAuthzPolicies": { + "methods": [ + "list_authz_policies" + ] + }, + "ListBackendAuthenticationConfigs": { + "methods": [ + "list_backend_authentication_configs" + ] + }, + "ListClientTlsPolicies": { + "methods": [ + "list_client_tls_policies" + ] + }, + "ListGatewaySecurityPolicies": { + "methods": [ + "list_gateway_security_policies" + ] + }, + "ListGatewaySecurityPolicyRules": { + "methods": [ + "list_gateway_security_policy_rules" + ] + }, + "ListServerTlsPolicies": { + "methods": [ + "list_server_tls_policies" + ] + }, + "ListTlsInspectionPolicies": { + "methods": [ + "list_tls_inspection_policies" + ] + }, + "ListUrlLists": { + "methods": [ + "list_url_lists" + ] + }, + "UpdateAuthorizationPolicy": { + "methods": [ + "update_authorization_policy" + ] + }, + "UpdateAuthzPolicy": { + "methods": [ + "update_authz_policy" + ] + }, + "UpdateBackendAuthenticationConfig": { + "methods": [ + "update_backend_authentication_config" + ] + }, + "UpdateClientTlsPolicy": { + "methods": [ + "update_client_tls_policy" + ] + }, + "UpdateGatewaySecurityPolicy": { + "methods": [ + "update_gateway_security_policy" + ] + }, + "UpdateGatewaySecurityPolicyRule": { + "methods": [ + "update_gateway_security_policy_rule" + ] + }, + "UpdateServerTlsPolicy": { + "methods": [ + "update_server_tls_policy" + ] + }, + "UpdateTlsInspectionPolicy": { + "methods": [ + "update_tls_inspection_policy" + ] + }, + "UpdateUrlList": { + "methods": [ + "update_url_list" + ] + } + } + }, + "grpc-async": { + "libraryClient": "NetworkSecurityAsyncClient", + "rpcs": { + "CreateAuthorizationPolicy": { + "methods": [ + "create_authorization_policy" + ] + }, + "CreateAuthzPolicy": { + "methods": [ + "create_authz_policy" + ] + }, + "CreateBackendAuthenticationConfig": { + "methods": [ + "create_backend_authentication_config" + ] + }, + "CreateClientTlsPolicy": { + "methods": [ + "create_client_tls_policy" + ] + }, + "CreateGatewaySecurityPolicy": { + "methods": [ + "create_gateway_security_policy" + ] + }, + "CreateGatewaySecurityPolicyRule": { + "methods": [ + "create_gateway_security_policy_rule" + ] + }, + "CreateServerTlsPolicy": { + "methods": [ + "create_server_tls_policy" + ] + }, + "CreateTlsInspectionPolicy": { + "methods": [ + 
"create_tls_inspection_policy" + ] + }, + "CreateUrlList": { + "methods": [ + "create_url_list" + ] + }, + "DeleteAuthorizationPolicy": { + "methods": [ + "delete_authorization_policy" + ] + }, + "DeleteAuthzPolicy": { + "methods": [ + "delete_authz_policy" + ] + }, + "DeleteBackendAuthenticationConfig": { + "methods": [ + "delete_backend_authentication_config" + ] + }, + "DeleteClientTlsPolicy": { + "methods": [ + "delete_client_tls_policy" + ] + }, + "DeleteGatewaySecurityPolicy": { + "methods": [ + "delete_gateway_security_policy" + ] + }, + "DeleteGatewaySecurityPolicyRule": { + "methods": [ + "delete_gateway_security_policy_rule" + ] + }, + "DeleteServerTlsPolicy": { + "methods": [ + "delete_server_tls_policy" + ] + }, + "DeleteTlsInspectionPolicy": { + "methods": [ + "delete_tls_inspection_policy" + ] + }, + "DeleteUrlList": { + "methods": [ + "delete_url_list" + ] + }, + "GetAuthorizationPolicy": { + "methods": [ + "get_authorization_policy" + ] + }, + "GetAuthzPolicy": { + "methods": [ + "get_authz_policy" + ] + }, + "GetBackendAuthenticationConfig": { + "methods": [ + "get_backend_authentication_config" + ] + }, + "GetClientTlsPolicy": { + "methods": [ + "get_client_tls_policy" + ] + }, + "GetGatewaySecurityPolicy": { + "methods": [ + "get_gateway_security_policy" + ] + }, + "GetGatewaySecurityPolicyRule": { + "methods": [ + "get_gateway_security_policy_rule" + ] + }, + "GetServerTlsPolicy": { + "methods": [ + "get_server_tls_policy" + ] + }, + "GetTlsInspectionPolicy": { + "methods": [ + "get_tls_inspection_policy" + ] + }, + "GetUrlList": { + "methods": [ + "get_url_list" + ] + }, + "ListAuthorizationPolicies": { + "methods": [ + "list_authorization_policies" + ] + }, + "ListAuthzPolicies": { + "methods": [ + "list_authz_policies" + ] + }, + "ListBackendAuthenticationConfigs": { + "methods": [ + "list_backend_authentication_configs" + ] + }, + "ListClientTlsPolicies": { + "methods": [ + "list_client_tls_policies" + ] + }, + "ListGatewaySecurityPolicies": { + "methods": [ + "list_gateway_security_policies" + ] + }, + "ListGatewaySecurityPolicyRules": { + "methods": [ + "list_gateway_security_policy_rules" + ] + }, + "ListServerTlsPolicies": { + "methods": [ + "list_server_tls_policies" + ] + }, + "ListTlsInspectionPolicies": { + "methods": [ + "list_tls_inspection_policies" + ] + }, + "ListUrlLists": { + "methods": [ + "list_url_lists" + ] + }, + "UpdateAuthorizationPolicy": { + "methods": [ + "update_authorization_policy" + ] + }, + "UpdateAuthzPolicy": { + "methods": [ + "update_authz_policy" + ] + }, + "UpdateBackendAuthenticationConfig": { + "methods": [ + "update_backend_authentication_config" + ] + }, + "UpdateClientTlsPolicy": { + "methods": [ + "update_client_tls_policy" + ] + }, + "UpdateGatewaySecurityPolicy": { + "methods": [ + "update_gateway_security_policy" + ] + }, + "UpdateGatewaySecurityPolicyRule": { + "methods": [ + "update_gateway_security_policy_rule" + ] + }, + "UpdateServerTlsPolicy": { + "methods": [ + "update_server_tls_policy" + ] + }, + "UpdateTlsInspectionPolicy": { + "methods": [ + "update_tls_inspection_policy" + ] + }, + "UpdateUrlList": { + "methods": [ + "update_url_list" + ] + } + } + }, + "rest": { + "libraryClient": "NetworkSecurityClient", + "rpcs": { + "CreateAuthorizationPolicy": { + "methods": [ + "create_authorization_policy" + ] + }, + "CreateAuthzPolicy": { + "methods": [ + "create_authz_policy" + ] + }, + "CreateBackendAuthenticationConfig": { + "methods": [ + "create_backend_authentication_config" + ] + }, + "CreateClientTlsPolicy": { 
+ "methods": [ + "create_client_tls_policy" + ] + }, + "CreateGatewaySecurityPolicy": { + "methods": [ + "create_gateway_security_policy" + ] + }, + "CreateGatewaySecurityPolicyRule": { + "methods": [ + "create_gateway_security_policy_rule" + ] + }, + "CreateServerTlsPolicy": { + "methods": [ + "create_server_tls_policy" + ] + }, + "CreateTlsInspectionPolicy": { + "methods": [ + "create_tls_inspection_policy" + ] + }, + "CreateUrlList": { + "methods": [ + "create_url_list" + ] + }, + "DeleteAuthorizationPolicy": { + "methods": [ + "delete_authorization_policy" + ] + }, + "DeleteAuthzPolicy": { + "methods": [ + "delete_authz_policy" + ] + }, + "DeleteBackendAuthenticationConfig": { + "methods": [ + "delete_backend_authentication_config" + ] + }, + "DeleteClientTlsPolicy": { + "methods": [ + "delete_client_tls_policy" + ] + }, + "DeleteGatewaySecurityPolicy": { + "methods": [ + "delete_gateway_security_policy" + ] + }, + "DeleteGatewaySecurityPolicyRule": { + "methods": [ + "delete_gateway_security_policy_rule" + ] + }, + "DeleteServerTlsPolicy": { + "methods": [ + "delete_server_tls_policy" + ] + }, + "DeleteTlsInspectionPolicy": { + "methods": [ + "delete_tls_inspection_policy" + ] + }, + "DeleteUrlList": { + "methods": [ + "delete_url_list" + ] + }, + "GetAuthorizationPolicy": { + "methods": [ + "get_authorization_policy" + ] + }, + "GetAuthzPolicy": { + "methods": [ + "get_authz_policy" + ] + }, + "GetBackendAuthenticationConfig": { + "methods": [ + "get_backend_authentication_config" + ] + }, + "GetClientTlsPolicy": { + "methods": [ + "get_client_tls_policy" + ] + }, + "GetGatewaySecurityPolicy": { + "methods": [ + "get_gateway_security_policy" + ] + }, + "GetGatewaySecurityPolicyRule": { + "methods": [ + "get_gateway_security_policy_rule" + ] + }, + "GetServerTlsPolicy": { + "methods": [ + "get_server_tls_policy" + ] + }, + "GetTlsInspectionPolicy": { + "methods": [ + "get_tls_inspection_policy" + ] + }, + "GetUrlList": { + "methods": [ + "get_url_list" + ] + }, + "ListAuthorizationPolicies": { + "methods": [ + "list_authorization_policies" + ] + }, + "ListAuthzPolicies": { + "methods": [ + "list_authz_policies" + ] + }, + "ListBackendAuthenticationConfigs": { + "methods": [ + "list_backend_authentication_configs" + ] + }, + "ListClientTlsPolicies": { + "methods": [ + "list_client_tls_policies" + ] + }, + "ListGatewaySecurityPolicies": { + "methods": [ + "list_gateway_security_policies" + ] + }, + "ListGatewaySecurityPolicyRules": { + "methods": [ + "list_gateway_security_policy_rules" + ] + }, + "ListServerTlsPolicies": { + "methods": [ + "list_server_tls_policies" + ] + }, + "ListTlsInspectionPolicies": { + "methods": [ + "list_tls_inspection_policies" + ] + }, + "ListUrlLists": { + "methods": [ + "list_url_lists" + ] + }, + "UpdateAuthorizationPolicy": { + "methods": [ + "update_authorization_policy" + ] + }, + "UpdateAuthzPolicy": { + "methods": [ + "update_authz_policy" + ] + }, + "UpdateBackendAuthenticationConfig": { + "methods": [ + "update_backend_authentication_config" + ] + }, + "UpdateClientTlsPolicy": { + "methods": [ + "update_client_tls_policy" + ] + }, + "UpdateGatewaySecurityPolicy": { + "methods": [ + "update_gateway_security_policy" + ] + }, + "UpdateGatewaySecurityPolicyRule": { + "methods": [ + "update_gateway_security_policy_rule" + ] + }, + "UpdateServerTlsPolicy": { + "methods": [ + "update_server_tls_policy" + ] + }, + "UpdateTlsInspectionPolicy": { + "methods": [ + "update_tls_inspection_policy" + ] + }, + "UpdateUrlList": { + "methods": [ + "update_url_list" 
+ ] + } + } + } + } + }, + "OrganizationSecurityProfileGroupService": { + "clients": { + "grpc": { + "libraryClient": "OrganizationSecurityProfileGroupServiceClient", + "rpcs": { + "CreateSecurityProfile": { + "methods": [ + "create_security_profile" + ] + }, + "CreateSecurityProfileGroup": { + "methods": [ + "create_security_profile_group" + ] + }, + "DeleteSecurityProfile": { + "methods": [ + "delete_security_profile" + ] + }, + "DeleteSecurityProfileGroup": { + "methods": [ + "delete_security_profile_group" + ] + }, + "GetSecurityProfile": { + "methods": [ + "get_security_profile" + ] + }, + "GetSecurityProfileGroup": { + "methods": [ + "get_security_profile_group" + ] + }, + "ListSecurityProfileGroups": { + "methods": [ + "list_security_profile_groups" + ] + }, + "ListSecurityProfiles": { + "methods": [ + "list_security_profiles" + ] + }, + "UpdateSecurityProfile": { + "methods": [ + "update_security_profile" + ] + }, + "UpdateSecurityProfileGroup": { + "methods": [ + "update_security_profile_group" + ] + } + } + }, + "grpc-async": { + "libraryClient": "OrganizationSecurityProfileGroupServiceAsyncClient", + "rpcs": { + "CreateSecurityProfile": { + "methods": [ + "create_security_profile" + ] + }, + "CreateSecurityProfileGroup": { + "methods": [ + "create_security_profile_group" + ] + }, + "DeleteSecurityProfile": { + "methods": [ + "delete_security_profile" + ] + }, + "DeleteSecurityProfileGroup": { + "methods": [ + "delete_security_profile_group" + ] + }, + "GetSecurityProfile": { + "methods": [ + "get_security_profile" + ] + }, + "GetSecurityProfileGroup": { + "methods": [ + "get_security_profile_group" + ] + }, + "ListSecurityProfileGroups": { + "methods": [ + "list_security_profile_groups" + ] + }, + "ListSecurityProfiles": { + "methods": [ + "list_security_profiles" + ] + }, + "UpdateSecurityProfile": { + "methods": [ + "update_security_profile" + ] + }, + "UpdateSecurityProfileGroup": { + "methods": [ + "update_security_profile_group" + ] + } + } + }, + "rest": { + "libraryClient": "OrganizationSecurityProfileGroupServiceClient", + "rpcs": { + "CreateSecurityProfile": { + "methods": [ + "create_security_profile" + ] + }, + "CreateSecurityProfileGroup": { + "methods": [ + "create_security_profile_group" + ] + }, + "DeleteSecurityProfile": { + "methods": [ + "delete_security_profile" + ] + }, + "DeleteSecurityProfileGroup": { + "methods": [ + "delete_security_profile_group" + ] + }, + "GetSecurityProfile": { + "methods": [ + "get_security_profile" + ] + }, + "GetSecurityProfileGroup": { + "methods": [ + "get_security_profile_group" + ] + }, + "ListSecurityProfileGroups": { + "methods": [ + "list_security_profile_groups" + ] + }, + "ListSecurityProfiles": { + "methods": [ + "list_security_profiles" + ] + }, + "UpdateSecurityProfile": { + "methods": [ + "update_security_profile" + ] + }, + "UpdateSecurityProfileGroup": { + "methods": [ + "update_security_profile_group" + ] + } + } + } + } + }, + "SSEGatewayService": { + "clients": { + "grpc": { + "libraryClient": "SSEGatewayServiceClient", "rpcs": { - "CreateClientTlsPolicy": { + "CreatePartnerSSEGateway": { "methods": [ - "create_client_tls_policy" + "create_partner_sse_gateway" ] }, - "DeleteClientTlsPolicy": { + "DeletePartnerSSEGateway": { "methods": [ - "delete_client_tls_policy" + "delete_partner_sse_gateway" ] }, - "GetClientTlsPolicy": { + "GetPartnerSSEGateway": { "methods": [ - "get_client_tls_policy" + "get_partner_sse_gateway" ] }, - "ListClientTlsPolicies": { + "GetSSEGatewayReference": { "methods": [ - 
"list_client_tls_policies" + "get_sse_gateway_reference" ] }, - "UpdateClientTlsPolicy": { + "ListPartnerSSEGateways": { "methods": [ - "update_client_tls_policy" + "list_partner_sse_gateways" + ] + }, + "ListSSEGatewayReferences": { + "methods": [ + "list_sse_gateway_references" + ] + }, + "UpdatePartnerSSEGateway": { + "methods": [ + "update_partner_sse_gateway" ] } } }, "grpc-async": { - "libraryClient": "NetworkSecurityAsyncClient", + "libraryClient": "SSEGatewayServiceAsyncClient", "rpcs": { - "CreateClientTlsPolicy": { + "CreatePartnerSSEGateway": { "methods": [ - "create_client_tls_policy" + "create_partner_sse_gateway" ] }, - "DeleteClientTlsPolicy": { + "DeletePartnerSSEGateway": { "methods": [ - "delete_client_tls_policy" + "delete_partner_sse_gateway" ] }, - "GetClientTlsPolicy": { + "GetPartnerSSEGateway": { "methods": [ - "get_client_tls_policy" + "get_partner_sse_gateway" ] }, - "ListClientTlsPolicies": { + "GetSSEGatewayReference": { "methods": [ - "list_client_tls_policies" + "get_sse_gateway_reference" ] }, - "UpdateClientTlsPolicy": { + "ListPartnerSSEGateways": { "methods": [ - "update_client_tls_policy" + "list_partner_sse_gateways" + ] + }, + "ListSSEGatewayReferences": { + "methods": [ + "list_sse_gateway_references" + ] + }, + "UpdatePartnerSSEGateway": { + "methods": [ + "update_partner_sse_gateway" ] } } }, "rest": { - "libraryClient": "NetworkSecurityClient", + "libraryClient": "SSEGatewayServiceClient", "rpcs": { - "CreateClientTlsPolicy": { + "CreatePartnerSSEGateway": { "methods": [ - "create_client_tls_policy" + "create_partner_sse_gateway" ] }, - "DeleteClientTlsPolicy": { + "DeletePartnerSSEGateway": { "methods": [ - "delete_client_tls_policy" + "delete_partner_sse_gateway" ] }, - "GetClientTlsPolicy": { + "GetPartnerSSEGateway": { "methods": [ - "get_client_tls_policy" + "get_partner_sse_gateway" ] }, - "ListClientTlsPolicies": { + "GetSSEGatewayReference": { "methods": [ - "list_client_tls_policies" + "get_sse_gateway_reference" ] }, - "UpdateClientTlsPolicy": { + "ListPartnerSSEGateways": { "methods": [ - "update_client_tls_policy" + "list_partner_sse_gateways" + ] + }, + "ListSSEGatewayReferences": { + "methods": [ + "list_sse_gateway_references" + ] + }, + "UpdatePartnerSSEGateway": { + "methods": [ + "update_partner_sse_gateway" + ] + } + } + } + } + }, + "SSERealmService": { + "clients": { + "grpc": { + "libraryClient": "SSERealmServiceClient", + "rpcs": { + "CreatePartnerSSERealm": { + "methods": [ + "create_partner_sse_realm" + ] + }, + "CreateSACAttachment": { + "methods": [ + "create_sac_attachment" + ] + }, + "CreateSACRealm": { + "methods": [ + "create_sac_realm" + ] + }, + "DeletePartnerSSERealm": { + "methods": [ + "delete_partner_sse_realm" + ] + }, + "DeleteSACAttachment": { + "methods": [ + "delete_sac_attachment" + ] + }, + "DeleteSACRealm": { + "methods": [ + "delete_sac_realm" + ] + }, + "GetPartnerSSERealm": { + "methods": [ + "get_partner_sse_realm" + ] + }, + "GetSACAttachment": { + "methods": [ + "get_sac_attachment" + ] + }, + "GetSACRealm": { + "methods": [ + "get_sac_realm" + ] + }, + "ListPartnerSSERealms": { + "methods": [ + "list_partner_sse_realms" + ] + }, + "ListSACAttachments": { + "methods": [ + "list_sac_attachments" + ] + }, + "ListSACRealms": { + "methods": [ + "list_sac_realms" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SSERealmServiceAsyncClient", + "rpcs": { + "CreatePartnerSSERealm": { + "methods": [ + "create_partner_sse_realm" + ] + }, + "CreateSACAttachment": { + "methods": [ + "create_sac_attachment" 
+ ] + }, + "CreateSACRealm": { + "methods": [ + "create_sac_realm" + ] + }, + "DeletePartnerSSERealm": { + "methods": [ + "delete_partner_sse_realm" + ] + }, + "DeleteSACAttachment": { + "methods": [ + "delete_sac_attachment" + ] + }, + "DeleteSACRealm": { + "methods": [ + "delete_sac_realm" + ] + }, + "GetPartnerSSERealm": { + "methods": [ + "get_partner_sse_realm" + ] + }, + "GetSACAttachment": { + "methods": [ + "get_sac_attachment" + ] + }, + "GetSACRealm": { + "methods": [ + "get_sac_realm" + ] + }, + "ListPartnerSSERealms": { + "methods": [ + "list_partner_sse_realms" + ] + }, + "ListSACAttachments": { + "methods": [ + "list_sac_attachments" + ] + }, + "ListSACRealms": { + "methods": [ + "list_sac_realms" + ] + } + } + }, + "rest": { + "libraryClient": "SSERealmServiceClient", + "rpcs": { + "CreatePartnerSSERealm": { + "methods": [ + "create_partner_sse_realm" + ] + }, + "CreateSACAttachment": { + "methods": [ + "create_sac_attachment" + ] + }, + "CreateSACRealm": { + "methods": [ + "create_sac_realm" + ] + }, + "DeletePartnerSSERealm": { + "methods": [ + "delete_partner_sse_realm" + ] + }, + "DeleteSACAttachment": { + "methods": [ + "delete_sac_attachment" + ] + }, + "DeleteSACRealm": { + "methods": [ + "delete_sac_realm" + ] + }, + "GetPartnerSSERealm": { + "methods": [ + "get_partner_sse_realm" + ] + }, + "GetSACAttachment": { + "methods": [ + "get_sac_attachment" + ] + }, + "GetSACRealm": { + "methods": [ + "get_sac_realm" + ] + }, + "ListPartnerSSERealms": { + "methods": [ + "list_partner_sse_realms" + ] + }, + "ListSACAttachments": { + "methods": [ + "list_sac_attachments" + ] + }, + "ListSACRealms": { + "methods": [ + "list_sac_realms" ] } } diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/__init__.py new file mode 100644 index 000000000000..89757fff72c5 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import DnsThreatDetectorServiceAsyncClient +from .client import DnsThreatDetectorServiceClient + +__all__ = ( + "DnsThreatDetectorServiceClient", + "DnsThreatDetectorServiceAsyncClient", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/async_client.py new file mode 100644 index 000000000000..7aa21c9b3f15 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/async_client.py @@ -0,0 +1,1596 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.dns_threat_detector_service import ( + pagers, +) +from google.cloud.network_security_v1alpha1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1alpha1.types import dns_threat_detector + +from .client import DnsThreatDetectorServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DnsThreatDetectorServiceTransport +from .transports.grpc_asyncio import DnsThreatDetectorServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class DnsThreatDetectorServiceAsyncClient: + """The Network Security API for DNS Threat Detectors.""" + + _client: DnsThreatDetectorServiceClient + + # Copy defaults from the synchronous client for use here. 
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = DnsThreatDetectorServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DnsThreatDetectorServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + + dns_threat_detector_path = staticmethod( + DnsThreatDetectorServiceClient.dns_threat_detector_path + ) + parse_dns_threat_detector_path = staticmethod( + DnsThreatDetectorServiceClient.parse_dns_threat_detector_path + ) + network_path = staticmethod(DnsThreatDetectorServiceClient.network_path) + parse_network_path = staticmethod(DnsThreatDetectorServiceClient.parse_network_path) + common_billing_account_path = staticmethod( + DnsThreatDetectorServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DnsThreatDetectorServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DnsThreatDetectorServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DnsThreatDetectorServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DnsThreatDetectorServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DnsThreatDetectorServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + DnsThreatDetectorServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + DnsThreatDetectorServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + DnsThreatDetectorServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + DnsThreatDetectorServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DnsThreatDetectorServiceAsyncClient: The constructed client. + """ + return DnsThreatDetectorServiceClient.from_service_account_info.__func__(DnsThreatDetectorServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DnsThreatDetectorServiceAsyncClient: The constructed client. + """ + return DnsThreatDetectorServiceClient.from_service_account_file.__func__(DnsThreatDetectorServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DnsThreatDetectorServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DnsThreatDetectorServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DnsThreatDetectorServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = DnsThreatDetectorServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + DnsThreatDetectorServiceTransport, + Callable[..., DnsThreatDetectorServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dns threat detector service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DnsThreatDetectorServiceTransport,Callable[..., DnsThreatDetectorServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DnsThreatDetectorServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided.
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DnsThreatDetectorServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceAsyncClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "credentialsType": None, + }, + ) + + async def list_dns_threat_detectors( + self, + request: Optional[ + Union[dns_threat_detector.ListDnsThreatDetectorsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDnsThreatDetectorsAsyncPager: + r"""Lists DnsThreatDetectors in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_dns_threat_detectors(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListDnsThreatDetectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dns_threat_detectors(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsRequest, dict]]): + The request object. The message for requesting a list of + DnsThreatDetectors in the project. + parent (:class:`str`): + Required. The parent value for + ``ListDnsThreatDetectorsRequest``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.dns_threat_detector_service.pagers.ListDnsThreatDetectorsAsyncPager: + The response message for requesting a + list of DnsThreatDetectors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dns_threat_detector.ListDnsThreatDetectorsRequest): + request = dns_threat_detector.ListDnsThreatDetectorsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_dns_threat_detectors + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method.
+ response = pagers.ListDnsThreatDetectorsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_dns_threat_detector( + self, + request: Optional[ + Union[dns_threat_detector.GetDnsThreatDetectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dns_threat_detector.DnsThreatDetector: + r"""Gets the details of a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetDnsThreatDetectorRequest, dict]]): + The request object. The message sent to get a + DnsThreatDetector. + name (:class:`str`): + Required. Name of the + DnsThreatDetector resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
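+        # For illustration: ``name`` follows the resource pattern
+        # ``projects/{project}/locations/{location}/dnsThreatDetectors/{dns_threat_detector}``,
+        # and a plain dict with the same fields is accepted and coerced below.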
+ if not isinstance(request, dns_threat_detector.GetDnsThreatDetectorRequest): + request = dns_threat_detector.GetDnsThreatDetectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_dns_threat_detector( + self, + request: Optional[ + Union[gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + dns_threat_detector: Optional[gcn_dns_threat_detector.DnsThreatDetector] = None, + dns_threat_detector_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Creates a new DnsThreatDetector in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1alpha1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1alpha1.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = await client.create_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateDnsThreatDetectorRequest, dict]]): + The request object. The message to create a + DnsThreatDetector. + parent (:class:`str`): + Required. The value for the parent of + the DnsThreatDetector resource. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dns_threat_detector (:class:`google.cloud.network_security_v1alpha1.types.DnsThreatDetector`): + Required. The ``DnsThreatDetector`` resource to create. + This corresponds to the ``dns_threat_detector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dns_threat_detector_id (:class:`str`): + Optional. The ID of the requesting + DnsThreatDetector object. If this field + is not supplied, the service generates + an identifier. 
+ + This corresponds to the ``dns_threat_detector_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, dns_threat_detector, dns_threat_detector_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_dns_threat_detector.CreateDnsThreatDetectorRequest + ): + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if dns_threat_detector is not None: + request.dns_threat_detector = dns_threat_detector + if dns_threat_detector_id is not None: + request.dns_threat_detector_id = dns_threat_detector_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_dns_threat_detector( + self, + request: Optional[ + Union[gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, dict] + ] = None, + *, + dns_threat_detector: Optional[gcn_dns_threat_detector.DnsThreatDetector] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Updates a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1alpha1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1alpha1.UpdateDnsThreatDetectorRequest( + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = await client.update_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateDnsThreatDetectorRequest, dict]]): + The request object. The message for updating a + DnsThreatDetector. + dns_threat_detector (:class:`google.cloud.network_security_v1alpha1.types.DnsThreatDetector`): + Required. The DnsThreatDetector + resource being updated. + + This corresponds to the ``dns_threat_detector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The field mask is used to specify the fields + to be overwritten in the DnsThreatDetector resource by + the update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the mask is + not provided then all fields present in the request will + be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [dns_threat_detector, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
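+        # For example (values are placeholders), a plain dict such as
+        # ``{"dns_threat_detector": {"name": "..."}, "update_mask": {"paths": ["provider"]}}``
+        # is accepted here and coerced into the proto-plus request type below.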
+ if not isinstance( + request, gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest + ): + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dns_threat_detector is not None: + request.dns_threat_detector = dns_threat_detector + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dns_threat_detector.name", request.dns_threat_detector.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_dns_threat_detector( + self, + request: Optional[ + Union[dns_threat_detector.DeleteDnsThreatDetectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + await client.delete_dns_threat_detector(request=request) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteDnsThreatDetectorRequest, dict]]): + The request object. The message for deleting a + DnsThreatDetector. + name (:class:`str`): + Required. Name of the + DnsThreatDetector resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
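+        # For example, ``delete_dns_threat_detector(name=...)`` and
+        # ``delete_dns_threat_detector(request=...)`` are each valid on their
+        # own, but supplying ``request`` together with ``name`` triggers the
+        # ValueError raised below.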
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dns_threat_detector.DeleteDnsThreatDetectorRequest): + request = dns_threat_detector.DeleteDnsThreatDetectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. 
+ It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DnsThreatDetectorServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("DnsThreatDetectorServiceAsyncClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/client.py new file mode 100644 index 000000000000..ef4700f3ef6a --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/client.py @@ -0,0 +1,2056 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.dns_threat_detector_service import ( + pagers, +) +from google.cloud.network_security_v1alpha1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1alpha1.types import dns_threat_detector + +from .transports.base import DEFAULT_CLIENT_INFO, DnsThreatDetectorServiceTransport +from .transports.grpc import DnsThreatDetectorServiceGrpcTransport +from .transports.grpc_asyncio import DnsThreatDetectorServiceGrpcAsyncIOTransport +from .transports.rest import DnsThreatDetectorServiceRestTransport + + +class DnsThreatDetectorServiceClientMeta(type): + """Metaclass for the DnsThreatDetectorService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DnsThreatDetectorServiceTransport]] + _transport_registry["grpc"] = DnsThreatDetectorServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DnsThreatDetectorServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DnsThreatDetectorServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DnsThreatDetectorServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
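+        # For example, ``get_transport_class("rest")`` resolves to the REST
+        # transport, while ``get_transport_class()`` with no label falls back
+        # to the gRPC transport registered first above.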
+        return next(iter(cls._transport_registry.values()))
+
+
+class DnsThreatDetectorServiceClient(metaclass=DnsThreatDetectorServiceClientMeta):
+    """The Network Security API for DNS Threat Detectors."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "networksecurity.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "networksecurity.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DnsThreatDetectorServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DnsThreatDetectorServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DnsThreatDetectorServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DnsThreatDetectorServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def dns_threat_detector_path(
+        project: str,
+        location: str,
+        dns_threat_detector: str,
+    ) -> str:
+        """Returns a fully-qualified dns_threat_detector string."""
+        return "projects/{project}/locations/{location}/dnsThreatDetectors/{dns_threat_detector}".format(
+            project=project,
+            location=location,
+            dns_threat_detector=dns_threat_detector,
+        )
+
+    @staticmethod
+    def parse_dns_threat_detector_path(path: str) -> Dict[str, str]:
+        """Parses a dns_threat_detector path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dnsThreatDetectors/(?P<dns_threat_detector>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def network_path(
+        project: str,
+        network: str,
+    ) -> str:
+        """Returns a fully-qualified network string."""
+        return "projects/{project}/global/networks/{network}".format(
+            project=project,
+            network=network,
+        )
+
+    @staticmethod
+    def parse_network_path(path: str) -> Dict[str, str]:
+        """Parses a network path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = DnsThreatDetectorServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + DnsThreatDetectorServiceTransport, + Callable[..., DnsThreatDetectorServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dns threat detector service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DnsThreatDetectorServiceTransport,Callable[..., DnsThreatDetectorServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DnsThreatDetectorServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = DnsThreatDetectorServiceClient._read_environment_variables() + self._client_cert_source = ( + DnsThreatDetectorServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = DnsThreatDetectorServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DnsThreatDetectorServiceTransport) + if transport_provided: + # transport is a DnsThreatDetectorServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(DnsThreatDetectorServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or DnsThreatDetectorServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[DnsThreatDetectorServiceTransport], + Callable[..., DnsThreatDetectorServiceTransport], + ] = ( + DnsThreatDetectorServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DnsThreatDetectorServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "credentialsType": None, + }, + ) + + def list_dns_threat_detectors( + self, + request: Optional[ + Union[dns_threat_detector.ListDnsThreatDetectorsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDnsThreatDetectorsPager: + r"""Lists DnsThreatDetectors in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_dns_threat_detectors(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListDnsThreatDetectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dns_threat_detectors(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsRequest, dict]): + The request object. The message for requesting a list of + DnsThreatDetectors in the project. + parent (str): + Required. The parent value for + ``ListDnsThreatDetectorsRequest``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.dns_threat_detector_service.pagers.ListDnsThreatDetectorsPager: + The response message to requesting a + list of DnsThreatDetectors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dns_threat_detector.ListDnsThreatDetectorsRequest): + request = dns_threat_detector.ListDnsThreatDetectorsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_dns_threat_detectors + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
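+        #
+        # Illustrative usage sketch (not part of the generated code): with this
+        # pager a caller can walk every result without handling page tokens;
+        # the parent value below is a made-up placeholder.
+        #
+        #     for detector in client.list_dns_threat_detectors(
+        #         parent="projects/my-project/locations/global"
+        #     ):
+        #         print(detector.name)
+        #
+        # Individual pages also remain reachable through the pager's `pages`
+        # property.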
+ response = pagers.ListDnsThreatDetectorsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dns_threat_detector( + self, + request: Optional[ + Union[dns_threat_detector.GetDnsThreatDetectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dns_threat_detector.DnsThreatDetector: + r"""Gets the details of a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + response = client.get_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetDnsThreatDetectorRequest, dict]): + The request object. The message sent to get a + DnsThreatDetector. + name (str): + Required. Name of the + DnsThreatDetector resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dns_threat_detector.GetDnsThreatDetectorRequest): + request = dns_threat_detector.GetDnsThreatDetectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
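+        #
+        # Illustrative aside (not generated code): the flattened `name` argument
+        # is a shorthand for building the request yourself; the resource name
+        # below is a made-up placeholder.
+        #
+        #     client.get_dns_threat_detector(
+        #         name="projects/my-project/locations/global/dnsThreatDetectors/my-detector"
+        #     )
+        #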
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dns_threat_detector] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_dns_threat_detector( + self, + request: Optional[ + Union[gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + dns_threat_detector: Optional[gcn_dns_threat_detector.DnsThreatDetector] = None, + dns_threat_detector_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Creates a new DnsThreatDetector in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1alpha1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1alpha1.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = client.create_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateDnsThreatDetectorRequest, dict]): + The request object. The message to create a + DnsThreatDetector. + parent (str): + Required. The value for the parent of + the DnsThreatDetector resource. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dns_threat_detector (google.cloud.network_security_v1alpha1.types.DnsThreatDetector): + Required. The ``DnsThreatDetector`` resource to create. + This corresponds to the ``dns_threat_detector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dns_threat_detector_id (str): + Optional. The ID of the requesting + DnsThreatDetector object. If this field + is not supplied, the service generates + an identifier. + + This corresponds to the ``dns_threat_detector_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, dns_threat_detector, dns_threat_detector_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_dns_threat_detector.CreateDnsThreatDetectorRequest + ): + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if dns_threat_detector is not None: + request.dns_threat_detector = dns_threat_detector + if dns_threat_detector_id is not None: + request.dns_threat_detector_id = dns_threat_detector_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_dns_threat_detector( + self, + request: Optional[ + Union[gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, dict] + ] = None, + *, + dns_threat_detector: Optional[gcn_dns_threat_detector.DnsThreatDetector] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Updates a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1alpha1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1alpha1.UpdateDnsThreatDetectorRequest( + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = client.update_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateDnsThreatDetectorRequest, dict]): + The request object. The message for updating a + DnsThreatDetector. + dns_threat_detector (google.cloud.network_security_v1alpha1.types.DnsThreatDetector): + Required. The DnsThreatDetector + resource being updated. + + This corresponds to the ``dns_threat_detector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The field mask is used to specify the fields + to be overwritten in the DnsThreatDetector resource by + the update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the mask is + not provided then all fields present in the request will + be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [dns_threat_detector, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest + ): + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
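+        #
+        # Illustrative usage sketch (not generated code): a partial update that
+        # only overwrites the fields listed in `update_mask`; the field path is
+        # an assumption based on the resource description above.
+        #
+        #     from google.protobuf import field_mask_pb2
+        #
+        #     client.update_dns_threat_detector(
+        #         dns_threat_detector=detector,
+        #         update_mask=field_mask_pb2.FieldMask(paths=["excluded_networks"]),
+        #     )
+        #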
+ if dns_threat_detector is not None: + request.dns_threat_detector = dns_threat_detector + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dns_threat_detector.name", request.dns_threat_detector.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_dns_threat_detector( + self, + request: Optional[ + Union[dns_threat_detector.DeleteDnsThreatDetectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + client.delete_dns_threat_detector(request=request) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteDnsThreatDetectorRequest, dict]): + The request object. The message for deleting a + DnsThreatDetector. + name (str): + Required. Name of the + DnsThreatDetector resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
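+        #
+        # Illustrative aside (not generated code): a plain dict is also accepted
+        # and is coerced into the request type just below, e.g.
+        #
+        #     client.delete_dns_threat_detector(request={"name": detector.name})
+        #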
+ if not isinstance(request, dns_threat_detector.DeleteDnsThreatDetectorRequest): + request = dns_threat_detector.DeleteDnsThreatDetectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "DnsThreatDetectorServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. 
A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("DnsThreatDetectorServiceClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/pagers.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/pagers.py new file mode 100644 index 000000000000..e30ae2f5b64c --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/pagers.py @@ -0,0 +1,201 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.network_security_v1alpha1.types import dns_threat_detector + + +class ListDnsThreatDetectorsPager: + """A pager for iterating through ``list_dns_threat_detectors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``dns_threat_detectors`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDnsThreatDetectors`` requests and continue to iterate + through the ``dns_threat_detectors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dns_threat_detector.ListDnsThreatDetectorsResponse], + request: dns_threat_detector.ListDnsThreatDetectorsRequest, + response: dns_threat_detector.ListDnsThreatDetectorsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dns_threat_detector.ListDnsThreatDetectorsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dns_threat_detector.ListDnsThreatDetectorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[dns_threat_detector.DnsThreatDetector]: + for page in self.pages: + yield from page.dns_threat_detectors + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDnsThreatDetectorsAsyncPager: + """A pager for iterating through ``list_dns_threat_detectors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``dns_threat_detectors`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDnsThreatDetectors`` requests and continue to iterate + through the ``dns_threat_detectors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[dns_threat_detector.ListDnsThreatDetectorsResponse] + ], + request: dns_threat_detector.ListDnsThreatDetectorsRequest, + response: dns_threat_detector.ListDnsThreatDetectorsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dns_threat_detector.ListDnsThreatDetectorsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[dns_threat_detector.ListDnsThreatDetectorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[dns_threat_detector.DnsThreatDetector]: + async def async_generator(): + async for page in self.pages: + for response in page.dns_threat_detectors: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/README.rst b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/README.rst new file mode 100644 index 000000000000..bccbb6f152a3 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DnsThreatDetectorServiceTransport` is the ABC for all transports. +- public child `DnsThreatDetectorServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DnsThreatDetectorServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDnsThreatDetectorServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DnsThreatDetectorServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/__init__.py new file mode 100644 index 000000000000..963a0f444a76 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DnsThreatDetectorServiceTransport +from .grpc import DnsThreatDetectorServiceGrpcTransport +from .grpc_asyncio import DnsThreatDetectorServiceGrpcAsyncIOTransport +from .rest import ( + DnsThreatDetectorServiceRestInterceptor, + DnsThreatDetectorServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[DnsThreatDetectorServiceTransport]] +_transport_registry["grpc"] = DnsThreatDetectorServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DnsThreatDetectorServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DnsThreatDetectorServiceRestTransport + +__all__ = ( + "DnsThreatDetectorServiceTransport", + "DnsThreatDetectorServiceGrpcTransport", + "DnsThreatDetectorServiceGrpcAsyncIOTransport", + "DnsThreatDetectorServiceRestTransport", + "DnsThreatDetectorServiceRestInterceptor", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/base.py new file mode 100644 index 000000000000..aff83466447f --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/base.py @@ -0,0 +1,371 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
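The transport names registered above can also be selected when constructing the client: the synchronous client accepts ``"grpc"`` (the default) or ``"rest"``, while the asynchronous client is backed by ``"grpc_asyncio"``. A hedged selection sketch, again assuming application default credentials:

    from google.cloud import network_security_v1alpha1

    # Select the REST transport by name; a pre-built transport instance may be
    # passed instead of a string when finer control over the channel is needed.
    client = network_security_v1alpha1.DnsThreatDetectorServiceClient(transport="rest")
    print(type(client.transport).__name__)  # DnsThreatDetectorServiceRestTransport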
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version +from google.cloud.network_security_v1alpha1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1alpha1.types import dns_threat_detector + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DnsThreatDetectorServiceTransport(abc.ABC): + """Abstract transport class for DnsThreatDetectorService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "networksecurity.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_dns_threat_detectors: gapic_v1.method.wrap_method( + self.list_dns_threat_detectors, + default_timeout=None, + client_info=client_info, + ), + self.get_dns_threat_detector: gapic_v1.method.wrap_method( + self.get_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.create_dns_threat_detector: gapic_v1.method.wrap_method( + self.create_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.update_dns_threat_detector: gapic_v1.method.wrap_method( + self.update_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.delete_dns_threat_detector: gapic_v1.method.wrap_method( + self.delete_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_dns_threat_detectors( + self, + ) -> Callable[ + [dns_threat_detector.ListDnsThreatDetectorsRequest], + Union[ + dns_threat_detector.ListDnsThreatDetectorsResponse, + Awaitable[dns_threat_detector.ListDnsThreatDetectorsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.GetDnsThreatDetectorRequest], + Union[ + dns_threat_detector.DnsThreatDetector, + Awaitable[dns_threat_detector.DnsThreatDetector], + ], + ]: + raise NotImplementedError() + + @property + def create_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.CreateDnsThreatDetectorRequest], + Union[ + gcn_dns_threat_detector.DnsThreatDetector, + Awaitable[gcn_dns_threat_detector.DnsThreatDetector], + ], + ]: + raise NotImplementedError() + + @property + def update_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest], + Union[ + gcn_dns_threat_detector.DnsThreatDetector, + Awaitable[gcn_dns_threat_detector.DnsThreatDetector], + ], + ]: + raise NotImplementedError() + + @property + def delete_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.DeleteDnsThreatDetectorRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DnsThreatDetectorServiceTransport",) diff --git 
a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/grpc.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/grpc.py new file mode 100644 index 000000000000..0f578b9b0a2f --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/grpc.py @@ -0,0 +1,678 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1alpha1.types import dns_threat_detector + +from .base import DEFAULT_CLIENT_INFO, DnsThreatDetectorServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + 
"metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DnsThreatDetectorServiceGrpcTransport(DnsThreatDetectorServiceTransport): + """gRPC backend transport for DnsThreatDetectorService. + + The Network Security API for DNS Threat Detectors. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_dns_threat_detectors( + self, + ) -> Callable[ + [dns_threat_detector.ListDnsThreatDetectorsRequest], + dns_threat_detector.ListDnsThreatDetectorsResponse, + ]: + r"""Return a callable for the list dns threat detectors method over gRPC. + + Lists DnsThreatDetectors in a given project and + location. + + Returns: + Callable[[~.ListDnsThreatDetectorsRequest], + ~.ListDnsThreatDetectorsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_dns_threat_detectors" not in self._stubs: + self._stubs["list_dns_threat_detectors"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService/ListDnsThreatDetectors", + request_serializer=dns_threat_detector.ListDnsThreatDetectorsRequest.serialize, + response_deserializer=dns_threat_detector.ListDnsThreatDetectorsResponse.deserialize, + ) + return self._stubs["list_dns_threat_detectors"] + + @property + def get_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.GetDnsThreatDetectorRequest], + dns_threat_detector.DnsThreatDetector, + ]: + r"""Return a callable for the get dns threat detector method over gRPC. + + Gets the details of a single DnsThreatDetector. + + Returns: + Callable[[~.GetDnsThreatDetectorRequest], + ~.DnsThreatDetector]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dns_threat_detector" not in self._stubs: + self._stubs["get_dns_threat_detector"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService/GetDnsThreatDetector", + request_serializer=dns_threat_detector.GetDnsThreatDetectorRequest.serialize, + response_deserializer=dns_threat_detector.DnsThreatDetector.deserialize, + ) + return self._stubs["get_dns_threat_detector"] + + @property + def create_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.CreateDnsThreatDetectorRequest], + gcn_dns_threat_detector.DnsThreatDetector, + ]: + r"""Return a callable for the create dns threat detector method over gRPC. + + Creates a new DnsThreatDetector in a given project + and location. + + Returns: + Callable[[~.CreateDnsThreatDetectorRequest], + ~.DnsThreatDetector]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_dns_threat_detector" not in self._stubs: + self._stubs[ + "create_dns_threat_detector" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService/CreateDnsThreatDetector", + request_serializer=gcn_dns_threat_detector.CreateDnsThreatDetectorRequest.serialize, + response_deserializer=gcn_dns_threat_detector.DnsThreatDetector.deserialize, + ) + return self._stubs["create_dns_threat_detector"] + + @property + def update_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest], + gcn_dns_threat_detector.DnsThreatDetector, + ]: + r"""Return a callable for the update dns threat detector method over gRPC. + + Updates a single DnsThreatDetector. + + Returns: + Callable[[~.UpdateDnsThreatDetectorRequest], + ~.DnsThreatDetector]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_dns_threat_detector" not in self._stubs: + self._stubs[ + "update_dns_threat_detector" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService/UpdateDnsThreatDetector", + request_serializer=gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest.serialize, + response_deserializer=gcn_dns_threat_detector.DnsThreatDetector.deserialize, + ) + return self._stubs["update_dns_threat_detector"] + + @property + def delete_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.DeleteDnsThreatDetectorRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete dns threat detector method over gRPC. + + Deletes a single DnsThreatDetector. + + Returns: + Callable[[~.DeleteDnsThreatDetectorRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dns_threat_detector" not in self._stubs: + self._stubs[ + "delete_dns_threat_detector" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService/DeleteDnsThreatDetector", + request_serializer=dns_threat_detector.DeleteDnsThreatDetectorRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_dns_threat_detector"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DnsThreatDetectorServiceGrpcTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/grpc_asyncio.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..069ed5f5bebd --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/grpc_asyncio.py @@ -0,0 +1,766 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1alpha1.types import dns_threat_detector + +from .base import DEFAULT_CLIENT_INFO, DnsThreatDetectorServiceTransport +from .grpc import DnsThreatDetectorServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if 
isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DnsThreatDetectorServiceGrpcAsyncIOTransport(DnsThreatDetectorServiceTransport): + """gRPC AsyncIO backend transport for DnsThreatDetectorService. + + The Network Security API for DNS Threat Detectors. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def list_dns_threat_detectors( + self, + ) -> Callable[ + [dns_threat_detector.ListDnsThreatDetectorsRequest], + Awaitable[dns_threat_detector.ListDnsThreatDetectorsResponse], + ]: + r"""Return a callable for the list dns threat detectors method over gRPC. + + Lists DnsThreatDetectors in a given project and + location. + + Returns: + Callable[[~.ListDnsThreatDetectorsRequest], + Awaitable[~.ListDnsThreatDetectorsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_dns_threat_detectors" not in self._stubs: + self._stubs["list_dns_threat_detectors"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService/ListDnsThreatDetectors", + request_serializer=dns_threat_detector.ListDnsThreatDetectorsRequest.serialize, + response_deserializer=dns_threat_detector.ListDnsThreatDetectorsResponse.deserialize, + ) + return self._stubs["list_dns_threat_detectors"] + + @property + def get_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.GetDnsThreatDetectorRequest], + Awaitable[dns_threat_detector.DnsThreatDetector], + ]: + r"""Return a callable for the get dns threat detector method over gRPC. + + Gets the details of a single DnsThreatDetector. + + Returns: + Callable[[~.GetDnsThreatDetectorRequest], + Awaitable[~.DnsThreatDetector]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dns_threat_detector" not in self._stubs: + self._stubs["get_dns_threat_detector"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService/GetDnsThreatDetector", + request_serializer=dns_threat_detector.GetDnsThreatDetectorRequest.serialize, + response_deserializer=dns_threat_detector.DnsThreatDetector.deserialize, + ) + return self._stubs["get_dns_threat_detector"] + + @property + def create_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.CreateDnsThreatDetectorRequest], + Awaitable[gcn_dns_threat_detector.DnsThreatDetector], + ]: + r"""Return a callable for the create dns threat detector method over gRPC. + + Creates a new DnsThreatDetector in a given project + and location. + + Returns: + Callable[[~.CreateDnsThreatDetectorRequest], + Awaitable[~.DnsThreatDetector]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
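+ # Every RPC property on this transport follows the same lazy pattern: the
+ # multicallable is built on first access and cached in ``self._stubs`` under
+ # the method name, so later accesses reuse it. A hedged sketch of invoking
+ # the returned async callable (request left empty for brevity):
+ #
+ #   rpc = transport.create_dns_threat_detector
+ #   detector = await rpc(gcn_dns_threat_detector.CreateDnsThreatDetectorRequest())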
+ if "create_dns_threat_detector" not in self._stubs: + self._stubs[ + "create_dns_threat_detector" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService/CreateDnsThreatDetector", + request_serializer=gcn_dns_threat_detector.CreateDnsThreatDetectorRequest.serialize, + response_deserializer=gcn_dns_threat_detector.DnsThreatDetector.deserialize, + ) + return self._stubs["create_dns_threat_detector"] + + @property + def update_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest], + Awaitable[gcn_dns_threat_detector.DnsThreatDetector], + ]: + r"""Return a callable for the update dns threat detector method over gRPC. + + Updates a single DnsThreatDetector. + + Returns: + Callable[[~.UpdateDnsThreatDetectorRequest], + Awaitable[~.DnsThreatDetector]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_dns_threat_detector" not in self._stubs: + self._stubs[ + "update_dns_threat_detector" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService/UpdateDnsThreatDetector", + request_serializer=gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest.serialize, + response_deserializer=gcn_dns_threat_detector.DnsThreatDetector.deserialize, + ) + return self._stubs["update_dns_threat_detector"] + + @property + def delete_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.DeleteDnsThreatDetectorRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete dns threat detector method over gRPC. + + Deletes a single DnsThreatDetector. + + Returns: + Callable[[~.DeleteDnsThreatDetectorRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
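+ # Note: this RPC resolves to ``empty_pb2.Empty`` rather than a resource, so a
+ # caller typically just awaits it for completion, e.g. (hypothetical sketch):
+ #
+ #   await transport.delete_dns_threat_detector(
+ #       dns_threat_detector.DeleteDnsThreatDetectorRequest()
+ #   )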
+ if "delete_dns_threat_detector" not in self._stubs: + self._stubs[ + "delete_dns_threat_detector" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService/DeleteDnsThreatDetector", + request_serializer=dns_threat_detector.DeleteDnsThreatDetectorRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_dns_threat_detector"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_dns_threat_detectors: self._wrap_method( + self.list_dns_threat_detectors, + default_timeout=None, + client_info=client_info, + ), + self.get_dns_threat_detector: self._wrap_method( + self.get_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.create_dns_threat_detector: self._wrap_method( + self.create_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.update_dns_threat_detector: self._wrap_method( + self.update_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.delete_dns_threat_detector: self._wrap_method( + self.delete_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
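+ # The Operations, Locations and IAM mixin RPCs below are wired exactly like
+ # the service RPCs above: created lazily on the logged channel, cached in
+ # ``self._stubs``, and given a wrapper in ``_prep_wrapped_messages``.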
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
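+ # A hedged sketch of exercising one of these IAM helpers directly on the
+ # transport (field values are placeholders):
+ #
+ #   perms = await transport.test_iam_permissions(
+ #       iam_policy_pb2.TestIamPermissionsRequest(resource="...", permissions=["..."])
+ #   )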
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("DnsThreatDetectorServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/rest.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/rest.py new file mode 100644 index 000000000000..0e08e18e6968 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/rest.py @@ -0,0 +1,2712 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.network_security_v1alpha1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1alpha1.types import dns_threat_detector + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseDnsThreatDetectorServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = 
google.protobuf.__version__ + + +class DnsThreatDetectorServiceRestInterceptor: + """Interceptor for DnsThreatDetectorService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DnsThreatDetectorServiceRestTransport. + + .. code-block:: python + class MyCustomDnsThreatDetectorServiceInterceptor(DnsThreatDetectorServiceRestInterceptor): + def pre_create_dns_threat_detector(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_dns_threat_detector(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_dns_threat_detector(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_dns_threat_detector(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dns_threat_detector(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_dns_threat_detectors(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_dns_threat_detectors(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_dns_threat_detector(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_dns_threat_detector(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DnsThreatDetectorServiceRestTransport(interceptor=MyCustomDnsThreatDetectorServiceInterceptor()) + client = DnsThreatDetectorServiceClient(transport=transport) + + + """ + + def pre_create_dns_threat_detector( + self, + request: gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_dns_threat_detector + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_create_dns_threat_detector( + self, response: gcn_dns_threat_detector.DnsThreatDetector + ) -> gcn_dns_threat_detector.DnsThreatDetector: + """Post-rpc interceptor for create_dns_threat_detector + + DEPRECATED. Please use the `post_create_dns_threat_detector_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. This `post_create_dns_threat_detector` interceptor runs + before the `post_create_dns_threat_detector_with_metadata` interceptor. 
+ """ + return response + + def post_create_dns_threat_detector_with_metadata( + self, + response: gcn_dns_threat_detector.DnsThreatDetector, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_dns_threat_detector.DnsThreatDetector, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_dns_threat_detector + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DnsThreatDetectorService server but before it is returned to user code. + + We recommend only using this `post_create_dns_threat_detector_with_metadata` + interceptor in new development instead of the `post_create_dns_threat_detector` interceptor. + When both interceptors are used, this `post_create_dns_threat_detector_with_metadata` interceptor runs after the + `post_create_dns_threat_detector` interceptor. The (possibly modified) response returned by + `post_create_dns_threat_detector` will be passed to + `post_create_dns_threat_detector_with_metadata`. + """ + return response, metadata + + def pre_delete_dns_threat_detector( + self, + request: dns_threat_detector.DeleteDnsThreatDetectorRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dns_threat_detector.DeleteDnsThreatDetectorRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_dns_threat_detector + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def pre_get_dns_threat_detector( + self, + request: dns_threat_detector.GetDnsThreatDetectorRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dns_threat_detector.GetDnsThreatDetectorRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_dns_threat_detector + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_get_dns_threat_detector( + self, response: dns_threat_detector.DnsThreatDetector + ) -> dns_threat_detector.DnsThreatDetector: + """Post-rpc interceptor for get_dns_threat_detector + + DEPRECATED. Please use the `post_get_dns_threat_detector_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. This `post_get_dns_threat_detector` interceptor runs + before the `post_get_dns_threat_detector_with_metadata` interceptor. + """ + return response + + def post_get_dns_threat_detector_with_metadata( + self, + response: dns_threat_detector.DnsThreatDetector, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dns_threat_detector.DnsThreatDetector, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_dns_threat_detector + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DnsThreatDetectorService server but before it is returned to user code. + + We recommend only using this `post_get_dns_threat_detector_with_metadata` + interceptor in new development instead of the `post_get_dns_threat_detector` interceptor. + When both interceptors are used, this `post_get_dns_threat_detector_with_metadata` interceptor runs after the + `post_get_dns_threat_detector` interceptor. 
The (possibly modified) response returned by + `post_get_dns_threat_detector` will be passed to + `post_get_dns_threat_detector_with_metadata`. + """ + return response, metadata + + def pre_list_dns_threat_detectors( + self, + request: dns_threat_detector.ListDnsThreatDetectorsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dns_threat_detector.ListDnsThreatDetectorsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_dns_threat_detectors + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_list_dns_threat_detectors( + self, response: dns_threat_detector.ListDnsThreatDetectorsResponse + ) -> dns_threat_detector.ListDnsThreatDetectorsResponse: + """Post-rpc interceptor for list_dns_threat_detectors + + DEPRECATED. Please use the `post_list_dns_threat_detectors_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. This `post_list_dns_threat_detectors` interceptor runs + before the `post_list_dns_threat_detectors_with_metadata` interceptor. + """ + return response + + def post_list_dns_threat_detectors_with_metadata( + self, + response: dns_threat_detector.ListDnsThreatDetectorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dns_threat_detector.ListDnsThreatDetectorsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_dns_threat_detectors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DnsThreatDetectorService server but before it is returned to user code. + + We recommend only using this `post_list_dns_threat_detectors_with_metadata` + interceptor in new development instead of the `post_list_dns_threat_detectors` interceptor. + When both interceptors are used, this `post_list_dns_threat_detectors_with_metadata` interceptor runs after the + `post_list_dns_threat_detectors` interceptor. The (possibly modified) response returned by + `post_list_dns_threat_detectors` will be passed to + `post_list_dns_threat_detectors_with_metadata`. + """ + return response, metadata + + def pre_update_dns_threat_detector( + self, + request: gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_dns_threat_detector + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_update_dns_threat_detector( + self, response: gcn_dns_threat_detector.DnsThreatDetector + ) -> gcn_dns_threat_detector.DnsThreatDetector: + """Post-rpc interceptor for update_dns_threat_detector + + DEPRECATED. Please use the `post_update_dns_threat_detector_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. This `post_update_dns_threat_detector` interceptor runs + before the `post_update_dns_threat_detector_with_metadata` interceptor. 
+ """ + return response + + def post_update_dns_threat_detector_with_metadata( + self, + response: gcn_dns_threat_detector.DnsThreatDetector, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_dns_threat_detector.DnsThreatDetector, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_dns_threat_detector + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DnsThreatDetectorService server but before it is returned to user code. + + We recommend only using this `post_update_dns_threat_detector_with_metadata` + interceptor in new development instead of the `post_update_dns_threat_detector` interceptor. + When both interceptors are used, this `post_update_dns_threat_detector_with_metadata` interceptor runs after the + `post_update_dns_threat_detector` interceptor. The (possibly modified) response returned by + `post_update_dns_threat_detector` will be passed to + `post_update_dns_threat_detector_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. 
+ """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DnsThreatDetectorServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DnsThreatDetectorServiceRestInterceptor + + +class DnsThreatDetectorServiceRestTransport(_BaseDnsThreatDetectorServiceRestTransport): + """REST backend synchronous transport for DnsThreatDetectorService. + + The Network Security API for DNS Threat Detectors. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DnsThreatDetectorServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DnsThreatDetectorServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateDnsThreatDetector( + _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.CreateDnsThreatDetector") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Call the create dns threat + detector method over HTTP. + + Args: + request (~.gcn_dns_threat_detector.CreateDnsThreatDetectorRequest): + The request object. The message to create a + DnsThreatDetector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcn_dns_threat_detector.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a + *provider* that then analyzes the logs to identify + threat events in the DNS queries. By default, all VPC + networks in your projects are included. You can exclude + specific networks by supplying ``excluded_networks``. 
+ + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_dns_threat_detector( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector._get_transcoded_request( + http_options, request + ) + + body = _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.CreateDnsThreatDetector", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "CreateDnsThreatDetector", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._CreateDnsThreatDetector._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
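+ # From this point the flow is identical for every REST method in this
+ # transport: HTTP errors (status >= 400) are mapped to the appropriate
+ # ``core_exceptions.GoogleAPICallError`` subclass via ``from_http_response``,
+ # the JSON body is parsed into the protobuf response, and the ``post_*`` /
+ # ``post_*_with_metadata`` interceptor hooks run before the result is
+ # returned to the caller.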
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcn_dns_threat_detector.DnsThreatDetector() + pb_resp = gcn_dns_threat_detector.DnsThreatDetector.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_dns_threat_detector(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_dns_threat_detector_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + gcn_dns_threat_detector.DnsThreatDetector.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.create_dns_threat_detector", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "CreateDnsThreatDetector", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteDnsThreatDetector( + _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteDnsThreatDetector, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.DeleteDnsThreatDetector") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: dns_threat_detector.DeleteDnsThreatDetectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete dns threat + detector method over HTTP. + + Args: + request (~.dns_threat_detector.DeleteDnsThreatDetectorRequest): + The request object. The message for deleting a + DnsThreatDetector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteDnsThreatDetector._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_dns_threat_detector( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteDnsThreatDetector._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteDnsThreatDetector._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.DeleteDnsThreatDetector", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "DeleteDnsThreatDetector", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._DeleteDnsThreatDetector._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDnsThreatDetector( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetDnsThreatDetector, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.GetDnsThreatDetector") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: dns_threat_detector.GetDnsThreatDetectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dns_threat_detector.DnsThreatDetector: + r"""Call the get dns threat detector method over HTTP. + + Args: + request (~.dns_threat_detector.GetDnsThreatDetectorRequest): + The request object. The message sent to get a + DnsThreatDetector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.dns_threat_detector.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a + *provider* that then analyzes the logs to identify + threat events in the DNS queries. By default, all VPC + networks in your projects are included. You can exclude + specific networks by supplying ``excluded_networks``. + + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetDnsThreatDetector._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_dns_threat_detector( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseGetDnsThreatDetector._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseGetDnsThreatDetector._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.GetDnsThreatDetector", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "GetDnsThreatDetector", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._GetDnsThreatDetector._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
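+ # A hypothetical end-to-end sketch of this path through the public surface
+ # (mirrors the interceptor example above; the flattened ``name`` argument is
+ # assumed from the standard GAPIC client surface):
+ #
+ #   client = DnsThreatDetectorServiceClient(
+ #       transport=DnsThreatDetectorServiceRestTransport()
+ #   )
+ #   detector = client.get_dns_threat_detector(name="...")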
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dns_threat_detector.DnsThreatDetector() + pb_resp = dns_threat_detector.DnsThreatDetector.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_dns_threat_detector(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_dns_threat_detector_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = dns_threat_detector.DnsThreatDetector.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.get_dns_threat_detector", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "GetDnsThreatDetector", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDnsThreatDetectors( + _BaseDnsThreatDetectorServiceRestTransport._BaseListDnsThreatDetectors, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.ListDnsThreatDetectors") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: dns_threat_detector.ListDnsThreatDetectorsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dns_threat_detector.ListDnsThreatDetectorsResponse: + r"""Call the list dns threat detectors method over HTTP. + + Args: + request (~.dns_threat_detector.ListDnsThreatDetectorsRequest): + The request object. The message for requesting a list of + DnsThreatDetectors in the project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dns_threat_detector.ListDnsThreatDetectorsResponse: + The response message to requesting a + list of DnsThreatDetectors. 
+ + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseListDnsThreatDetectors._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_dns_threat_detectors( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseListDnsThreatDetectors._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseListDnsThreatDetectors._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.ListDnsThreatDetectors", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "ListDnsThreatDetectors", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._ListDnsThreatDetectors._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dns_threat_detector.ListDnsThreatDetectorsResponse() + pb_resp = dns_threat_detector.ListDnsThreatDetectorsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_dns_threat_detectors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_dns_threat_detectors_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + dns_threat_detector.ListDnsThreatDetectorsResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.list_dns_threat_detectors", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "ListDnsThreatDetectors", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDnsThreatDetector( + _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.UpdateDnsThreatDetector") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + 
"{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Call the update dns threat + detector method over HTTP. + + Args: + request (~.gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest): + The request object. The message for updating a + DnsThreatDetector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcn_dns_threat_detector.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a + *provider* that then analyzes the logs to identify + threat events in the DNS queries. By default, all VPC + networks in your projects are included. You can exclude + specific networks by supplying ``excluded_networks``. + + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_dns_threat_detector( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector._get_transcoded_request( + http_options, request + ) + + body = _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.UpdateDnsThreatDetector", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "UpdateDnsThreatDetector", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._UpdateDnsThreatDetector._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcn_dns_threat_detector.DnsThreatDetector() + pb_resp = gcn_dns_threat_detector.DnsThreatDetector.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_dns_threat_detector(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_dns_threat_detector_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + gcn_dns_threat_detector.DnsThreatDetector.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.update_dns_threat_detector", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "UpdateDnsThreatDetector", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.CreateDnsThreatDetectorRequest], + gcn_dns_threat_detector.DnsThreatDetector, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDnsThreatDetector(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.DeleteDnsThreatDetectorRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDnsThreatDetector(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.GetDnsThreatDetectorRequest], + dns_threat_detector.DnsThreatDetector, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDnsThreatDetector(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_dns_threat_detectors( + self, + ) -> Callable[ + [dns_threat_detector.ListDnsThreatDetectorsRequest], + dns_threat_detector.ListDnsThreatDetectorsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDnsThreatDetectors(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest], + gcn_dns_threat_detector.DnsThreatDetector, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDnsThreatDetector(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetLocation, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseDnsThreatDetectorServiceRestTransport._BaseListLocations, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetIamPolicy, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, 
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseDnsThreatDetectorServiceRestTransport._BaseSetIamPolicy, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseDnsThreatDetectorServiceRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseDnsThreatDetectorServiceRestTransport._BaseTestIamPermissions, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def 
__call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseDnsThreatDetectorServiceRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseDnsThreatDetectorServiceRestTransport._BaseCancelOperation, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseDnsThreatDetectorServiceRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.CancelOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteOperation, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetOperation, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseDnsThreatDetectorServiceRestTransport._BaseListOperations, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.DnsThreatDetectorServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DnsThreatDetectorServiceRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/rest_base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/rest_base.py new file mode 100644 index 000000000000..351daab4c7d7 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/dns_threat_detector_service/transports/rest_base.py @@ -0,0 +1,674 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.network_security_v1alpha1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1alpha1.types import dns_threat_detector + +from .base import DEFAULT_CLIENT_INFO, DnsThreatDetectorServiceTransport + + +class _BaseDnsThreatDetectorServiceRestTransport(DnsThreatDetectorServiceTransport): + """Base REST backend transport for DnsThreatDetectorService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateDnsThreatDetector: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/dnsThreatDetectors", + "body": "dns_threat_detector", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDnsThreatDetector: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/dnsThreatDetectors/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dns_threat_detector.DeleteDnsThreatDetectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteDnsThreatDetector._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDnsThreatDetector: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/dnsThreatDetectors/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dns_threat_detector.GetDnsThreatDetectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetDnsThreatDetector._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDnsThreatDetectors: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/dnsThreatDetectors", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dns_threat_detector.ListDnsThreatDetectorsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDnsThreatDetectorServiceRestTransport._BaseListDnsThreatDetectors._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDnsThreatDetector: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{dns_threat_detector.name=projects/*/locations/*/dnsThreatDetectors/*}", + "body": "dns_threat_detector", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": 
"/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def 
_get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseDnsThreatDetectorServiceRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/__init__.py new file mode 100644 index 000000000000..6d56329744b4 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import FirewallActivationAsyncClient +from .client import FirewallActivationClient + +__all__ = ( + "FirewallActivationClient", + "FirewallActivationAsyncClient", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/async_client.py new file mode 100644 index 000000000000..f5d1962e7d0b --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/async_client.py @@ -0,0 +1,2332 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.firewall_activation import pagers +from google.cloud.network_security_v1alpha1.types import common, firewall_activation + +from .client import FirewallActivationClient +from .transports.base import DEFAULT_CLIENT_INFO, FirewallActivationTransport +from .transports.grpc_asyncio import FirewallActivationGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class FirewallActivationAsyncClient: + """Service for managing Firewall Endpoints and Associations.""" + + _client: FirewallActivationClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = FirewallActivationClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = FirewallActivationClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = FirewallActivationClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = FirewallActivationClient._DEFAULT_UNIVERSE + + firewall_endpoint_path = staticmethod( + FirewallActivationClient.firewall_endpoint_path + ) + parse_firewall_endpoint_path = staticmethod( + FirewallActivationClient.parse_firewall_endpoint_path + ) + firewall_endpoint_association_path = staticmethod( + FirewallActivationClient.firewall_endpoint_association_path + ) + parse_firewall_endpoint_association_path = staticmethod( + FirewallActivationClient.parse_firewall_endpoint_association_path + ) + network_path = staticmethod(FirewallActivationClient.network_path) + parse_network_path = staticmethod(FirewallActivationClient.parse_network_path) + tls_inspection_policy_path = staticmethod( + FirewallActivationClient.tls_inspection_policy_path + ) + parse_tls_inspection_policy_path = staticmethod( + FirewallActivationClient.parse_tls_inspection_policy_path + ) + common_billing_account_path = staticmethod( + FirewallActivationClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + FirewallActivationClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(FirewallActivationClient.common_folder_path) + parse_common_folder_path = staticmethod( + FirewallActivationClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + FirewallActivationClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + FirewallActivationClient.parse_common_organization_path + ) + common_project_path = staticmethod(FirewallActivationClient.common_project_path) + parse_common_project_path = staticmethod( + FirewallActivationClient.parse_common_project_path + ) + common_location_path = staticmethod(FirewallActivationClient.common_location_path) + parse_common_location_path = staticmethod( + FirewallActivationClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirewallActivationAsyncClient: The constructed client. + """ + return FirewallActivationClient.from_service_account_info.__func__(FirewallActivationAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirewallActivationAsyncClient: The constructed client. + """ + return FirewallActivationClient.from_service_account_file.__func__(FirewallActivationAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return FirewallActivationClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> FirewallActivationTransport: + """Returns the transport used by the client instance. + + Returns: + FirewallActivationTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = FirewallActivationClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + FirewallActivationTransport, + Callable[..., FirewallActivationTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the firewall activation async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,FirewallActivationTransport,Callable[..., FirewallActivationTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the FirewallActivationTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = FirewallActivationClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.FirewallActivationAsyncClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "credentialsType": None, + }, + ) + + async def list_firewall_endpoints( + self, + request: Optional[ + Union[firewall_activation.ListFirewallEndpointsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFirewallEndpointsAsyncPager: + r"""Lists FirewallEndpoints in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_firewall_endpoints(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListFirewallEndpointsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_firewall_endpoints(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsRequest, dict]]): + The request object. Message for requesting list of + Endpoints + parent (:class:`str`): + Required. Parent value for + ListEndpointsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.firewall_activation.pagers.ListFirewallEndpointsAsyncPager: + Message for response to listing + Endpoints + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, firewall_activation.ListFirewallEndpointsRequest): + request = firewall_activation.ListFirewallEndpointsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_firewall_endpoints + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
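+        # Caller-side sketch (comment only; the variable names and the
+        # "parent_value" placeholder are illustrative, not generated output):
+        # awaiting this coroutine yields the pager, and iterating it with
+        # `async for` resolves additional pages automatically, e.g.
+        #
+        #     pager = await client.list_firewall_endpoints(parent="parent_value")
+        #     async for endpoint in pager:
+        #         print(endpoint)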
+ response = pagers.ListFirewallEndpointsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_firewall_endpoint( + self, + request: Optional[ + Union[firewall_activation.GetFirewallEndpointRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> firewall_activation.FirewallEndpoint: + r"""Gets details of a single Endpoint. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_firewall_endpoint(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetFirewallEndpointRequest( + name="name_value", + ) + + # Make the request + response = await client.get_firewall_endpoint(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetFirewallEndpointRequest, dict]]): + The request object. Message for getting a Endpoint + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.FirewallEndpoint: + Message describing Endpoint object. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, firewall_activation.GetFirewallEndpointRequest): + request = firewall_activation.GetFirewallEndpointRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
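+        # The pre-wrapped callables live in the transport's `_wrapped_methods`
+        # mapping, keyed by the underlying transport method object.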
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_firewall_endpoint + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_firewall_endpoint( + self, + request: Optional[ + Union[firewall_activation.CreateFirewallEndpointRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + firewall_endpoint: Optional[firewall_activation.FirewallEndpoint] = None, + firewall_endpoint_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new FirewallEndpoint in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_firewall_endpoint(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + firewall_endpoint = network_security_v1alpha1.FirewallEndpoint() + firewall_endpoint.billing_project_id = "billing_project_id_value" + + request = network_security_v1alpha1.CreateFirewallEndpointRequest( + parent="parent_value", + firewall_endpoint_id="firewall_endpoint_id_value", + firewall_endpoint=firewall_endpoint, + ) + + # Make the request + operation = client.create_firewall_endpoint(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateFirewallEndpointRequest, dict]]): + The request object. Message for creating a Endpoint + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_endpoint (:class:`google.cloud.network_security_v1alpha1.types.FirewallEndpoint`): + Required. The resource being created + This corresponds to the ``firewall_endpoint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_endpoint_id (:class:`str`): + Required. Id of the requesting object. If + auto-generating Id server-side, remove this field and + firewall_endpoint_id from the method_signature of Create + RPC. + + This corresponds to the ``firewall_endpoint_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.FirewallEndpoint` + Message describing Endpoint object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, firewall_endpoint, firewall_endpoint_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, firewall_activation.CreateFirewallEndpointRequest): + request = firewall_activation.CreateFirewallEndpointRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if firewall_endpoint is not None: + request.firewall_endpoint = firewall_endpoint + if firewall_endpoint_id is not None: + request.firewall_endpoint_id = firewall_endpoint_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_firewall_endpoint + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + firewall_activation.FirewallEndpoint, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_firewall_endpoint( + self, + request: Optional[ + Union[firewall_activation.DeleteFirewallEndpointRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Endpoint. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_firewall_endpoint(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteFirewallEndpointRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_firewall_endpoint(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteFirewallEndpointRequest, dict]]): + The request object. Message for deleting a Endpoint + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, firewall_activation.DeleteFirewallEndpointRequest): + request = firewall_activation.DeleteFirewallEndpointRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_firewall_endpoint + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
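+        # The awaited call below returns the raw long-running operation, which is
+        # then wrapped into an `operation_async.AsyncOperation` future whose
+        # eventual result type is Empty (see the Returns section of the docstring
+        # above).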
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_firewall_endpoint( + self, + request: Optional[ + Union[firewall_activation.UpdateFirewallEndpointRequest, dict] + ] = None, + *, + firewall_endpoint: Optional[firewall_activation.FirewallEndpoint] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update a single Endpoint. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_firewall_endpoint(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + firewall_endpoint = network_security_v1alpha1.FirewallEndpoint() + firewall_endpoint.billing_project_id = "billing_project_id_value" + + request = network_security_v1alpha1.UpdateFirewallEndpointRequest( + firewall_endpoint=firewall_endpoint, + ) + + # Make the request + operation = client.update_firewall_endpoint(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateFirewallEndpointRequest, dict]]): + The request object. Message for updating a Endpoint + firewall_endpoint (:class:`google.cloud.network_security_v1alpha1.types.FirewallEndpoint`): + Required. The resource being updated + This corresponds to the ``firewall_endpoint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Endpoint resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.FirewallEndpoint` + Message describing Endpoint object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [firewall_endpoint, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, firewall_activation.UpdateFirewallEndpointRequest): + request = firewall_activation.UpdateFirewallEndpointRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_endpoint is not None: + request.firewall_endpoint = firewall_endpoint + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_firewall_endpoint + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("firewall_endpoint.name", request.firewall_endpoint.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + firewall_activation.FirewallEndpoint, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_firewall_endpoint_associations( + self, + request: Optional[ + Union[firewall_activation.ListFirewallEndpointAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFirewallEndpointAssociationsAsyncPager: + r"""Lists Associations in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_firewall_endpoint_associations(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListFirewallEndpointAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_firewall_endpoint_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsRequest, dict]]): + The request object. Message for requesting list of + Associations + parent (:class:`str`): + Required. Parent value for + ListAssociationsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.firewall_activation.pagers.ListFirewallEndpointAssociationsAsyncPager: + Message for response to listing + Associations + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, firewall_activation.ListFirewallEndpointAssociationsRequest + ): + request = firewall_activation.ListFirewallEndpointAssociationsRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_firewall_endpoint_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
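+        # Note: `retry`, `timeout`, and `metadata` are forwarded to the pager
+        # below, so the same settings are reused when additional pages are
+        # fetched.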
+ response = pagers.ListFirewallEndpointAssociationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_firewall_endpoint_association( + self, + request: Optional[ + Union[firewall_activation.GetFirewallEndpointAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> firewall_activation.FirewallEndpointAssociation: + r"""Gets details of a single FirewallEndpointAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetFirewallEndpointAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_firewall_endpoint_association(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetFirewallEndpointAssociationRequest, dict]]): + The request object. Message for getting a Association + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation: + Message describing Association object + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, firewall_activation.GetFirewallEndpointAssociationRequest + ): + request = firewall_activation.GetFirewallEndpointAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_firewall_endpoint_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_firewall_endpoint_association( + self, + request: Optional[ + Union[firewall_activation.CreateFirewallEndpointAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + firewall_endpoint_association: Optional[ + firewall_activation.FirewallEndpointAssociation + ] = None, + firewall_endpoint_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new FirewallEndpointAssociation in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + firewall_endpoint_association = network_security_v1alpha1.FirewallEndpointAssociation() + firewall_endpoint_association.network = "network_value" + firewall_endpoint_association.firewall_endpoint = "firewall_endpoint_value" + + request = network_security_v1alpha1.CreateFirewallEndpointAssociationRequest( + parent="parent_value", + firewall_endpoint_association=firewall_endpoint_association, + ) + + # Make the request + operation = client.create_firewall_endpoint_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateFirewallEndpointAssociationRequest, dict]]): + The request object. Message for creating a Association + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_endpoint_association (:class:`google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation`): + Required. The resource being created + This corresponds to the ``firewall_endpoint_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_endpoint_association_id (:class:`str`): + Optional. Id of the requesting object. 
If + auto-generating Id server-side, remove this field and + firewall_endpoint_association_id from the + method_signature of Create RPC. + + This corresponds to the ``firewall_endpoint_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation` + Message describing Association object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + firewall_endpoint_association, + firewall_endpoint_association_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, firewall_activation.CreateFirewallEndpointAssociationRequest + ): + request = firewall_activation.CreateFirewallEndpointAssociationRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if firewall_endpoint_association is not None: + request.firewall_endpoint_association = firewall_endpoint_association + if firewall_endpoint_association_id is not None: + request.firewall_endpoint_association_id = firewall_endpoint_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_firewall_endpoint_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + firewall_activation.FirewallEndpointAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
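+        # The AsyncOperation returned below resolves to a
+        # FirewallEndpointAssociation once the create completes (see the Returns
+        # section of the docstring above); callers await the operation as shown
+        # in the generated sample.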
+ return response + + async def delete_firewall_endpoint_association( + self, + request: Optional[ + Union[firewall_activation.DeleteFirewallEndpointAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single FirewallEndpointAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteFirewallEndpointAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_firewall_endpoint_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteFirewallEndpointAssociationRequest, dict]]): + The request object. Message for deleting a Association + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
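The generated sample above uses the request-object form; the flattened arguments (``parent``, ``firewall_endpoint_association``, ``firewall_endpoint_association_id``) can be passed directly instead. A minimal sketch of that calling style, assuming placeholder project, organization, zone, network, and endpoint values:

.. code-block:: python

    import asyncio

    from google.cloud import network_security_v1alpha1

    async def create_association():
        client = network_security_v1alpha1.FirewallActivationAsyncClient()

        association = network_security_v1alpha1.FirewallEndpointAssociation()
        # Placeholder resource names; substitute real organization, project,
        # zone, network, and firewall endpoint values.
        association.network = "projects/my-project/global/networks/my-network"
        association.firewall_endpoint = (
            "organizations/123/locations/us-central1-a/firewallEndpoints/my-endpoint"
        )

        operation = await client.create_firewall_endpoint_association(
            parent="projects/my-project/locations/us-central1-a",
            firewall_endpoint_association=association,
            firewall_endpoint_association_id="my-association",
        )
        # The call returns a long-running operation; await its result.
        return await operation.result()

    asyncio.run(create_association())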
+ if not isinstance( + request, firewall_activation.DeleteFirewallEndpointAssociationRequest + ): + request = firewall_activation.DeleteFirewallEndpointAssociationRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_firewall_endpoint_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_firewall_endpoint_association( + self, + request: Optional[ + Union[firewall_activation.UpdateFirewallEndpointAssociationRequest, dict] + ] = None, + *, + firewall_endpoint_association: Optional[ + firewall_activation.FirewallEndpointAssociation + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update a single FirewallEndpointAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + firewall_endpoint_association = network_security_v1alpha1.FirewallEndpointAssociation() + firewall_endpoint_association.network = "network_value" + firewall_endpoint_association.firewall_endpoint = "firewall_endpoint_value" + + request = network_security_v1alpha1.UpdateFirewallEndpointAssociationRequest( + firewall_endpoint_association=firewall_endpoint_association, + ) + + # Make the request + operation = client.update_firewall_endpoint_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateFirewallEndpointAssociationRequest, dict]]): + The request object. Message for updating an Association + firewall_endpoint_association (:class:`google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation`): + Required. 
The resource being updated + This corresponds to the ``firewall_endpoint_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Association resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation` + Message describing Association object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [firewall_endpoint_association, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, firewall_activation.UpdateFirewallEndpointAssociationRequest + ): + request = firewall_activation.UpdateFirewallEndpointAssociationRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_endpoint_association is not None: + request.firewall_endpoint_association = firewall_endpoint_association + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_firewall_endpoint_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "firewall_endpoint_association.name", + request.firewall_endpoint_association.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + firewall_activation.FirewallEndpointAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
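The ``update_mask`` parameter is a ``google.protobuf.field_mask_pb2.FieldMask`` naming the fields to overwrite. A minimal sketch of the flattened form; the ``labels`` field path and the pre-populated ``association`` object are assumptions for illustration, not taken from the API surface above:

.. code-block:: python

    from google.protobuf import field_mask_pb2

    async def update_association(client, association):
        # `association` must already carry the resource name of an existing
        # FirewallEndpointAssociation plus the new field values; `labels` is
        # an assumed example field path.
        operation = await client.update_firewall_endpoint_association(
            firewall_endpoint_association=association,
            update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
        )
        return await operation.result()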
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. 
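The operation mixins above accept either the raw ``operations_pb2`` request types or plain dicts. A small sketch that checks an operation and cancels it if it is still running; the operation name is a placeholder:

.. code-block:: python

    from google.longrunning import operations_pb2

    async def cancel_if_running(client, operation_name):
        # `operation_name` is the fully qualified operation resource name
        # returned by one of the mutation calls above (placeholder value).
        op = await client.get_operation(
            request=operations_pb2.GetOperationRequest(name=operation_name)
        )
        if not op.done:
            # Cancellation is best-effort, as the docstring above notes.
            await client.cancel_operation(
                request=operations_pb2.CancelOperationRequest(name=operation_name)
            )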
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. 
+ """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "FirewallActivationAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("FirewallActivationAsyncClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/client.py new file mode 100644 index 000000000000..9b2bcd98a237 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/client.py @@ -0,0 +1,2815 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
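The synchronous client defined below registers ``grpc``, ``grpc_asyncio``, and ``rest`` transports and accepts ``client_options``; the regional-endpoint note repeated in the generated samples maps onto the ``api_endpoint`` option. A sketch of explicit construction, where the endpoint value is a placeholder and may simply be omitted to use the default endpoint resolution:

.. code-block:: python

    from google.api_core.client_options import ClientOptions

    from google.cloud import network_security_v1alpha1

    # Sketch only: the endpoint shown is a placeholder and must match the
    # region/universe actually being targeted.
    client = network_security_v1alpha1.FirewallActivationClient(
        transport="rest",
        client_options=ClientOptions(api_endpoint="networksecurity.googleapis.com"),
    )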
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.firewall_activation import pagers +from google.cloud.network_security_v1alpha1.types import common, firewall_activation + +from .transports.base import DEFAULT_CLIENT_INFO, FirewallActivationTransport +from .transports.grpc import FirewallActivationGrpcTransport +from .transports.grpc_asyncio import FirewallActivationGrpcAsyncIOTransport +from .transports.rest import FirewallActivationRestTransport + + +class FirewallActivationClientMeta(type): + """Metaclass for the FirewallActivation client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[FirewallActivationTransport]] + _transport_registry["grpc"] = FirewallActivationGrpcTransport + _transport_registry["grpc_asyncio"] = FirewallActivationGrpcAsyncIOTransport + _transport_registry["rest"] = FirewallActivationRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[FirewallActivationTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
+        return next(iter(cls._transport_registry.values()))
+
+
+class FirewallActivationClient(metaclass=FirewallActivationClientMeta):
+    """Service for managing Firewall Endpoints and Associations."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "networksecurity.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "networksecurity.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            FirewallActivationClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            FirewallActivationClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> FirewallActivationTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            FirewallActivationTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def firewall_endpoint_path(
+        organization: str,
+        location: str,
+        firewall_endpoint: str,
+    ) -> str:
+        """Returns a fully-qualified firewall_endpoint string."""
+        return "organizations/{organization}/locations/{location}/firewallEndpoints/{firewall_endpoint}".format(
+            organization=organization,
+            location=location,
+            firewall_endpoint=firewall_endpoint,
+        )
+
+    @staticmethod
+    def parse_firewall_endpoint_path(path: str) -> Dict[str, str]:
+        """Parses a firewall_endpoint path into its component segments."""
+        m = re.match(
+            r"^organizations/(?P<organization>.+?)/locations/(?P<location>.+?)/firewallEndpoints/(?P<firewall_endpoint>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def firewall_endpoint_association_path(
+        project: str,
+        location: str,
+        firewall_endpoint_association: str,
+    ) -> str:
+        """Returns a fully-qualified firewall_endpoint_association string."""
+        return "projects/{project}/locations/{location}/firewallEndpointAssociations/{firewall_endpoint_association}".format(
+            project=project,
+            location=location,
+            firewall_endpoint_association=firewall_endpoint_association,
+        )
+
+    @staticmethod
+    def parse_firewall_endpoint_association_path(path: str) -> Dict[str, str]:
+        """Parses a firewall_endpoint_association path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/firewallEndpointAssociations/(?P<firewall_endpoint_association>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def network_path(
+        project: str,
+        network: str,
+    ) -> str:
+        """Returns a fully-qualified network string."""
+        return "projects/{project}/global/networks/{network}".format(
+            project=project,
+            network=network,
+        )
+
+    @staticmethod
+    def parse_network_path(path: str) -> Dict[str, str]:
+        """Parses a network path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def tls_inspection_policy_path(
+        project: str,
+        location: str,
+        tls_inspection_policy: str,
+    ) -> str:
+        """Returns a fully-qualified tls_inspection_policy string."""
+        return "projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}".format(
+            project=project,
+            location=location,
+            tls_inspection_policy=tls_inspection_policy,
+        )
+
+    @staticmethod
+    def parse_tls_inspection_policy_path(path: str) -> Dict[str, str]:
+        """Parses a tls_inspection_policy path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/tlsInspectionPolicies/(?P<tls_inspection_policy>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = FirewallActivationClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = FirewallActivationClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = FirewallActivationClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = FirewallActivationClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + FirewallActivationTransport, + Callable[..., FirewallActivationTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the firewall activation client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,FirewallActivationTransport,Callable[..., FirewallActivationTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the FirewallActivationTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = FirewallActivationClient._read_environment_variables() + self._client_cert_source = FirewallActivationClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = FirewallActivationClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. 
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, FirewallActivationTransport) + if transport_provided: + # transport is a FirewallActivationTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(FirewallActivationTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or FirewallActivationClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[FirewallActivationTransport], + Callable[..., FirewallActivationTransport], + ] = ( + FirewallActivationClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., FirewallActivationTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.FirewallActivationClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "credentialsType": None, + }, + ) + + def list_firewall_endpoints( + self, + request: Optional[ + Union[firewall_activation.ListFirewallEndpointsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFirewallEndpointsPager: + r"""Lists FirewallEndpoints in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_firewall_endpoints(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListFirewallEndpointsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_firewall_endpoints(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsRequest, dict]): + The request object. Message for requesting list of + Endpoints + parent (str): + Required. Parent value for + ListEndpointsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.firewall_activation.pagers.ListFirewallEndpointsPager: + Message for response to listing + Endpoints + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, firewall_activation.ListFirewallEndpointsRequest): + request = firewall_activation.ListFirewallEndpointsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_firewall_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListFirewallEndpointsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response
+
+ def get_firewall_endpoint(
+ self,
+ request: Optional[
+ Union[firewall_activation.GetFirewallEndpointRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> firewall_activation.FirewallEndpoint:
+ r"""Gets details of a single Endpoint.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import network_security_v1alpha1
+
+ def sample_get_firewall_endpoint():
+ # Create a client
+ client = network_security_v1alpha1.FirewallActivationClient()
+
+ # Initialize request argument(s)
+ request = network_security_v1alpha1.GetFirewallEndpointRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = client.get_firewall_endpoint(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.network_security_v1alpha1.types.GetFirewallEndpointRequest, dict]):
+ The request object. Message for getting an Endpoint
+ name (str):
+ Required. Name of the resource
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.network_security_v1alpha1.types.FirewallEndpoint:
+ Message describing Endpoint object.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, firewall_activation.GetFirewallEndpointRequest):
+ request = firewall_activation.GetFirewallEndpointRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_firewall_endpoint]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def create_firewall_endpoint(
+ self,
+ request: Optional[
+ Union[firewall_activation.CreateFirewallEndpointRequest, dict]
+ ] = None,
+ *,
+ parent: Optional[str] = None,
+ firewall_endpoint: Optional[firewall_activation.FirewallEndpoint] = None,
+ firewall_endpoint_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Creates a new FirewallEndpoint in a given project and
+ location.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import network_security_v1alpha1
+
+ def sample_create_firewall_endpoint():
+ # Create a client
+ client = network_security_v1alpha1.FirewallActivationClient()
+
+ # Initialize request argument(s)
+ firewall_endpoint = network_security_v1alpha1.FirewallEndpoint()
+ firewall_endpoint.billing_project_id = "billing_project_id_value"
+
+ request = network_security_v1alpha1.CreateFirewallEndpointRequest(
+ parent="parent_value",
+ firewall_endpoint_id="firewall_endpoint_id_value",
+ firewall_endpoint=firewall_endpoint,
+ )
+
+ # Make the request
+ operation = client.create_firewall_endpoint(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.network_security_v1alpha1.types.CreateFirewallEndpointRequest, dict]):
+ The request object. Message for creating an Endpoint
+ parent (str):
+ Required. Value for parent.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ firewall_endpoint (google.cloud.network_security_v1alpha1.types.FirewallEndpoint):
+ Required. The resource being created
+ This corresponds to the ``firewall_endpoint`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ firewall_endpoint_id (str):
+ Required. Id of the requesting object. If
+ auto-generating Id server-side, remove this field and
+ firewall_endpoint_id from the method_signature of Create
+ RPC.
+
+ This corresponds to the ``firewall_endpoint_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.network_security_v1alpha1.types.FirewallEndpoint`
+ Message describing Endpoint object.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, firewall_endpoint, firewall_endpoint_id]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, firewall_activation.CreateFirewallEndpointRequest):
+ request = firewall_activation.CreateFirewallEndpointRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if firewall_endpoint is not None:
+ request.firewall_endpoint = firewall_endpoint
+ if firewall_endpoint_id is not None:
+ request.firewall_endpoint_id = firewall_endpoint_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_firewall_endpoint]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ firewall_activation.FirewallEndpoint,
+ metadata_type=common.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_firewall_endpoint(
+ self,
+ request: Optional[
+ Union[firewall_activation.DeleteFirewallEndpointRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Deletes a single Endpoint.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import network_security_v1alpha1
+
+ def sample_delete_firewall_endpoint():
+ # Create a client
+ client = network_security_v1alpha1.FirewallActivationClient()
+
+ # Initialize request argument(s)
+ request = network_security_v1alpha1.DeleteFirewallEndpointRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ operation = client.delete_firewall_endpoint(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.network_security_v1alpha1.types.DeleteFirewallEndpointRequest, dict]):
+ The request object. Message for deleting an Endpoint
+ name (str):
+ Required.
Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, firewall_activation.DeleteFirewallEndpointRequest): + request = firewall_activation.DeleteFirewallEndpointRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_firewall_endpoint] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_firewall_endpoint( + self, + request: Optional[ + Union[firewall_activation.UpdateFirewallEndpointRequest, dict] + ] = None, + *, + firewall_endpoint: Optional[firewall_activation.FirewallEndpoint] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Update a single Endpoint. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import network_security_v1alpha1
+
+ def sample_update_firewall_endpoint():
+ # Create a client
+ client = network_security_v1alpha1.FirewallActivationClient()
+
+ # Initialize request argument(s)
+ firewall_endpoint = network_security_v1alpha1.FirewallEndpoint()
+ firewall_endpoint.billing_project_id = "billing_project_id_value"
+
+ request = network_security_v1alpha1.UpdateFirewallEndpointRequest(
+ firewall_endpoint=firewall_endpoint,
+ )
+
+ # Make the request
+ operation = client.update_firewall_endpoint(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.network_security_v1alpha1.types.UpdateFirewallEndpointRequest, dict]):
+ The request object. Message for updating an Endpoint
+ firewall_endpoint (google.cloud.network_security_v1alpha1.types.FirewallEndpoint):
+ Required. The resource being updated
+ This corresponds to the ``firewall_endpoint`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
+ Required. Field mask is used to specify the fields to be
+ overwritten in the Endpoint resource by the update. The
+ fields specified in the update_mask are relative to the
+ resource, not the full request. A field will be
+ overwritten if it is in the mask. If the user does not
+ provide a mask then all fields will be overwritten.
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.network_security_v1alpha1.types.FirewallEndpoint`
+ Message describing Endpoint object.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [firewall_endpoint, update_mask]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, firewall_activation.UpdateFirewallEndpointRequest):
+ request = firewall_activation.UpdateFirewallEndpointRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if firewall_endpoint is not None: + request.firewall_endpoint = firewall_endpoint + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_firewall_endpoint] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("firewall_endpoint.name", request.firewall_endpoint.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + firewall_activation.FirewallEndpoint, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_firewall_endpoint_associations( + self, + request: Optional[ + Union[firewall_activation.ListFirewallEndpointAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFirewallEndpointAssociationsPager: + r"""Lists Associations in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_firewall_endpoint_associations(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListFirewallEndpointAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_firewall_endpoint_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsRequest, dict]): + The request object. Message for requesting list of + Associations + parent (str): + Required. Parent value for + ListAssociationsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.firewall_activation.pagers.ListFirewallEndpointAssociationsPager: + Message for response to listing + Associations + Iterating over this object will yield + results and resolve additional pages + automatically. 
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(
+ request, firewall_activation.ListFirewallEndpointAssociationsRequest
+ ):
+ request = firewall_activation.ListFirewallEndpointAssociationsRequest(
+ request
+ )
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[
+ self._transport.list_firewall_endpoint_associations
+ ]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListFirewallEndpointAssociationsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_firewall_endpoint_association(
+ self,
+ request: Optional[
+ Union[firewall_activation.GetFirewallEndpointAssociationRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> firewall_activation.FirewallEndpointAssociation:
+ r"""Gets details of a single FirewallEndpointAssociation.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import network_security_v1alpha1
+
+ def sample_get_firewall_endpoint_association():
+ # Create a client
+ client = network_security_v1alpha1.FirewallActivationClient()
+
+ # Initialize request argument(s)
+ request = network_security_v1alpha1.GetFirewallEndpointAssociationRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = client.get_firewall_endpoint_association(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.network_security_v1alpha1.types.GetFirewallEndpointAssociationRequest, dict]):
+ The request object. Message for getting an Association
+ name (str):
+ Required.
Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation: + Message describing Association object + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, firewall_activation.GetFirewallEndpointAssociationRequest + ): + request = firewall_activation.GetFirewallEndpointAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_firewall_endpoint_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_firewall_endpoint_association( + self, + request: Optional[ + Union[firewall_activation.CreateFirewallEndpointAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + firewall_endpoint_association: Optional[ + firewall_activation.FirewallEndpointAssociation + ] = None, + firewall_endpoint_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new FirewallEndpointAssociation in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import network_security_v1alpha1
+
+ def sample_create_firewall_endpoint_association():
+ # Create a client
+ client = network_security_v1alpha1.FirewallActivationClient()
+
+ # Initialize request argument(s)
+ firewall_endpoint_association = network_security_v1alpha1.FirewallEndpointAssociation()
+ firewall_endpoint_association.network = "network_value"
+ firewall_endpoint_association.firewall_endpoint = "firewall_endpoint_value"
+
+ request = network_security_v1alpha1.CreateFirewallEndpointAssociationRequest(
+ parent="parent_value",
+ firewall_endpoint_association=firewall_endpoint_association,
+ )
+
+ # Make the request
+ operation = client.create_firewall_endpoint_association(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.network_security_v1alpha1.types.CreateFirewallEndpointAssociationRequest, dict]):
+ The request object. Message for creating an Association
+ parent (str):
+ Required. Value for parent.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ firewall_endpoint_association (google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation):
+ Required. The resource being created
+ This corresponds to the ``firewall_endpoint_association`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ firewall_endpoint_association_id (str):
+ Optional. Id of the requesting object. If
+ auto-generating Id server-side, remove this field and
+ firewall_endpoint_association_id from the
+ method_signature of Create RPC.
+
+ This corresponds to the ``firewall_endpoint_association_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation`
+ Message describing Association object
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [
+ parent,
+ firewall_endpoint_association,
+ firewall_endpoint_association_id,
+ ]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(
+ request, firewall_activation.CreateFirewallEndpointAssociationRequest
+ ):
+ request = firewall_activation.CreateFirewallEndpointAssociationRequest(
+ request
+ )
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if firewall_endpoint_association is not None:
+ request.firewall_endpoint_association = firewall_endpoint_association
+ if firewall_endpoint_association_id is not None:
+ request.firewall_endpoint_association_id = (
+ firewall_endpoint_association_id
+ )
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[
+ self._transport.create_firewall_endpoint_association
+ ]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ firewall_activation.FirewallEndpointAssociation,
+ metadata_type=common.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_firewall_endpoint_association(
+ self,
+ request: Optional[
+ Union[firewall_activation.DeleteFirewallEndpointAssociationRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Deletes a single FirewallEndpointAssociation.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import network_security_v1alpha1
+
+ def sample_delete_firewall_endpoint_association():
+ # Create a client
+ client = network_security_v1alpha1.FirewallActivationClient()
+
+ # Initialize request argument(s)
+ request = network_security_v1alpha1.DeleteFirewallEndpointAssociationRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ operation = client.delete_firewall_endpoint_association(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.network_security_v1alpha1.types.DeleteFirewallEndpointAssociationRequest, dict]):
+ The request object. Message for deleting an Association
+ name (str):
+ Required. Name of the resource
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata.
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, firewall_activation.DeleteFirewallEndpointAssociationRequest + ): + request = firewall_activation.DeleteFirewallEndpointAssociationRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_firewall_endpoint_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_firewall_endpoint_association( + self, + request: Optional[ + Union[firewall_activation.UpdateFirewallEndpointAssociationRequest, dict] + ] = None, + *, + firewall_endpoint_association: Optional[ + firewall_activation.FirewallEndpointAssociation + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Update a single FirewallEndpointAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + firewall_endpoint_association = network_security_v1alpha1.FirewallEndpointAssociation() + firewall_endpoint_association.network = "network_value" + firewall_endpoint_association.firewall_endpoint = "firewall_endpoint_value" + + request = network_security_v1alpha1.UpdateFirewallEndpointAssociationRequest( + firewall_endpoint_association=firewall_endpoint_association, + ) + + # Make the request + operation = client.update_firewall_endpoint_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateFirewallEndpointAssociationRequest, dict]): + The request object. Message for updating an Association + firewall_endpoint_association (google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation): + Required. The resource being updated + This corresponds to the ``firewall_endpoint_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Association resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation` + Message describing Association object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [firewall_endpoint_association, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, firewall_activation.UpdateFirewallEndpointAssociationRequest + ): + request = firewall_activation.UpdateFirewallEndpointAssociationRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_endpoint_association is not None: + request.firewall_endpoint_association = firewall_endpoint_association + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_firewall_endpoint_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "firewall_endpoint_association.name", + request.firewall_endpoint_association.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + firewall_activation.FirewallEndpointAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "FirewallActivationClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. 
+ + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("FirewallActivationClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/pagers.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/pagers.py new file mode 100644 index 000000000000..04f2a4d09b95 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/pagers.py @@ -0,0 +1,371 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.network_security_v1alpha1.types import firewall_activation + + +class ListFirewallEndpointsPager: + """A pager for iterating through ``list_firewall_endpoints`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``firewall_endpoints`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListFirewallEndpoints`` requests and continue to iterate + through the ``firewall_endpoints`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., firewall_activation.ListFirewallEndpointsResponse], + request: firewall_activation.ListFirewallEndpointsRequest, + response: firewall_activation.ListFirewallEndpointsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
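+
+        A minimal usage sketch: the pager is normally obtained from
+        ``FirewallActivationClient.list_firewall_endpoints`` rather than
+        constructed directly; the ``parent`` value below is illustrative::
+
+            pager = client.list_firewall_endpoints(
+                request={"parent": "projects/my-project/locations/us-central1-a"}
+            )
+            for endpoint in pager:
+                print(endpoint.name)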
+ """ + self._method = method + self._request = firewall_activation.ListFirewallEndpointsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[firewall_activation.ListFirewallEndpointsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[firewall_activation.FirewallEndpoint]: + for page in self.pages: + yield from page.firewall_endpoints + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListFirewallEndpointsAsyncPager: + """A pager for iterating through ``list_firewall_endpoints`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``firewall_endpoints`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListFirewallEndpoints`` requests and continue to iterate + through the ``firewall_endpoints`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[firewall_activation.ListFirewallEndpointsResponse] + ], + request: firewall_activation.ListFirewallEndpointsRequest, + response: firewall_activation.ListFirewallEndpointsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = firewall_activation.ListFirewallEndpointsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[firewall_activation.ListFirewallEndpointsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[firewall_activation.FirewallEndpoint]: + async def async_generator(): + async for page in self.pages: + for response in page.firewall_endpoints: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListFirewallEndpointAssociationsPager: + """A pager for iterating through ``list_firewall_endpoint_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``firewall_endpoint_associations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListFirewallEndpointAssociations`` requests and continue to iterate + through the ``firewall_endpoint_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., firewall_activation.ListFirewallEndpointAssociationsResponse + ], + request: firewall_activation.ListFirewallEndpointAssociationsRequest, + response: firewall_activation.ListFirewallEndpointAssociationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = firewall_activation.ListFirewallEndpointAssociationsRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[firewall_activation.ListFirewallEndpointAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[firewall_activation.FirewallEndpointAssociation]: + for page in self.pages: + yield from page.firewall_endpoint_associations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListFirewallEndpointAssociationsAsyncPager: + """A pager for iterating through ``list_firewall_endpoint_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``firewall_endpoint_associations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListFirewallEndpointAssociations`` requests and continue to iterate + through the ``firewall_endpoint_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[firewall_activation.ListFirewallEndpointAssociationsResponse] + ], + request: firewall_activation.ListFirewallEndpointAssociationsRequest, + response: firewall_activation.ListFirewallEndpointAssociationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
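+
+        A minimal async usage sketch: the pager is normally returned by
+        ``FirewallActivationAsyncClient.list_firewall_endpoint_associations``;
+        the ``parent`` value below is illustrative::
+
+            pager = await client.list_firewall_endpoint_associations(
+                request={"parent": "projects/my-project/locations/us-central1-a"}
+            )
+            async for association in pager:
+                print(association.name)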
+ """ + self._method = method + self._request = firewall_activation.ListFirewallEndpointAssociationsRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[firewall_activation.ListFirewallEndpointAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[firewall_activation.FirewallEndpointAssociation]: + async def async_generator(): + async for page in self.pages: + for response in page.firewall_endpoint_associations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/README.rst b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/README.rst new file mode 100644 index 000000000000..46d58acf4c86 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`FirewallActivationTransport` is the ABC for all transports. +- public child `FirewallActivationGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `FirewallActivationGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseFirewallActivationRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `FirewallActivationRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/__init__.py new file mode 100644 index 000000000000..7de09082beee --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import FirewallActivationTransport +from .grpc import FirewallActivationGrpcTransport +from .grpc_asyncio import FirewallActivationGrpcAsyncIOTransport +from .rest import FirewallActivationRestInterceptor, FirewallActivationRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[FirewallActivationTransport]] +_transport_registry["grpc"] = FirewallActivationGrpcTransport +_transport_registry["grpc_asyncio"] = FirewallActivationGrpcAsyncIOTransport +_transport_registry["rest"] = FirewallActivationRestTransport + +__all__ = ( + "FirewallActivationTransport", + "FirewallActivationGrpcTransport", + "FirewallActivationGrpcAsyncIOTransport", + "FirewallActivationRestTransport", + "FirewallActivationRestInterceptor", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/base.py new file mode 100644 index 000000000000..73ccf713746b --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/base.py @@ -0,0 +1,442 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version +from google.cloud.network_security_v1alpha1.types import firewall_activation + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class FirewallActivationTransport(abc.ABC): + """Abstract transport class for FirewallActivation.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "networksecurity.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
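+        # The checks below resolve credentials in this order: an explicit
+        # ``credentials`` object is used as-is, otherwise ``credentials_file``
+        # (deprecated) is loaded, and otherwise application default
+        # credentials are obtained via ``google.auth.default()``.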
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_firewall_endpoints: gapic_v1.method.wrap_method( + self.list_firewall_endpoints, + default_timeout=None, + client_info=client_info, + ), + self.get_firewall_endpoint: gapic_v1.method.wrap_method( + self.get_firewall_endpoint, + default_timeout=None, + client_info=client_info, + ), + self.create_firewall_endpoint: gapic_v1.method.wrap_method( + self.create_firewall_endpoint, + default_timeout=None, + client_info=client_info, + ), + self.delete_firewall_endpoint: gapic_v1.method.wrap_method( + self.delete_firewall_endpoint, + default_timeout=None, + client_info=client_info, + ), + self.update_firewall_endpoint: gapic_v1.method.wrap_method( + self.update_firewall_endpoint, + default_timeout=None, + client_info=client_info, + ), + self.list_firewall_endpoint_associations: gapic_v1.method.wrap_method( + self.list_firewall_endpoint_associations, + default_timeout=None, + client_info=client_info, + ), + self.get_firewall_endpoint_association: gapic_v1.method.wrap_method( + self.get_firewall_endpoint_association, + default_timeout=None, + client_info=client_info, + ), + self.create_firewall_endpoint_association: gapic_v1.method.wrap_method( + self.create_firewall_endpoint_association, + default_timeout=None, + client_info=client_info, + ), + self.delete_firewall_endpoint_association: gapic_v1.method.wrap_method( + self.delete_firewall_endpoint_association, + default_timeout=None, + client_info=client_info, + ), + self.update_firewall_endpoint_association: gapic_v1.method.wrap_method( + self.update_firewall_endpoint_association, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: 
gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_firewall_endpoints( + self, + ) -> Callable[ + [firewall_activation.ListFirewallEndpointsRequest], + Union[ + firewall_activation.ListFirewallEndpointsResponse, + Awaitable[firewall_activation.ListFirewallEndpointsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.GetFirewallEndpointRequest], + Union[ + firewall_activation.FirewallEndpoint, + Awaitable[firewall_activation.FirewallEndpoint], + ], + ]: + raise NotImplementedError() + + @property + def create_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.CreateFirewallEndpointRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.DeleteFirewallEndpointRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.UpdateFirewallEndpointRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_firewall_endpoint_associations( + self, + ) -> Callable[ + [firewall_activation.ListFirewallEndpointAssociationsRequest], + Union[ + firewall_activation.ListFirewallEndpointAssociationsResponse, + Awaitable[firewall_activation.ListFirewallEndpointAssociationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.GetFirewallEndpointAssociationRequest], + Union[ + firewall_activation.FirewallEndpointAssociation, + Awaitable[firewall_activation.FirewallEndpointAssociation], + ], + ]: + raise NotImplementedError() + + @property + def create_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.CreateFirewallEndpointAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.DeleteFirewallEndpointAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_firewall_endpoint_association( + self, + ) -> Callable[ + 
[firewall_activation.UpdateFirewallEndpointAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("FirewallActivationTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/grpc.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/grpc.py new file mode 100644 index 000000000000..0803591459be --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/grpc.py @@ -0,0 +1,844 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import json
+import logging as std_logging
+import pickle
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers, operations_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.network_security_v1alpha1.types import firewall_activation
+
+from .base import DEFAULT_CLIENT_INFO, FirewallActivationTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata to a plain dict of strings for logging.
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class FirewallActivationGrpcTransport(FirewallActivationTransport):
+    """gRPC backend transport for FirewallActivation.
+
+    Service for managing Firewall Endpoints and Associations.
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_firewall_endpoints( + self, + ) -> Callable[ + [firewall_activation.ListFirewallEndpointsRequest], + firewall_activation.ListFirewallEndpointsResponse, + ]: + r"""Return a callable for the list firewall endpoints method over gRPC. + + Lists FirewallEndpoints in a given project and + location. + + Returns: + Callable[[~.ListFirewallEndpointsRequest], + ~.ListFirewallEndpointsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_firewall_endpoints" not in self._stubs: + self._stubs["list_firewall_endpoints"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/ListFirewallEndpoints", + request_serializer=firewall_activation.ListFirewallEndpointsRequest.serialize, + response_deserializer=firewall_activation.ListFirewallEndpointsResponse.deserialize, + ) + return self._stubs["list_firewall_endpoints"] + + @property + def get_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.GetFirewallEndpointRequest], + firewall_activation.FirewallEndpoint, + ]: + r"""Return a callable for the get firewall endpoint method over gRPC. + + Gets details of a single Endpoint. + + Returns: + Callable[[~.GetFirewallEndpointRequest], + ~.FirewallEndpoint]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_firewall_endpoint" not in self._stubs: + self._stubs["get_firewall_endpoint"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/GetFirewallEndpoint", + request_serializer=firewall_activation.GetFirewallEndpointRequest.serialize, + response_deserializer=firewall_activation.FirewallEndpoint.deserialize, + ) + return self._stubs["get_firewall_endpoint"] + + @property + def create_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.CreateFirewallEndpointRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create firewall endpoint method over gRPC. + + Creates a new FirewallEndpoint in a given project and + location. + + Returns: + Callable[[~.CreateFirewallEndpointRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_firewall_endpoint" not in self._stubs: + self._stubs["create_firewall_endpoint"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/CreateFirewallEndpoint", + request_serializer=firewall_activation.CreateFirewallEndpointRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_firewall_endpoint"] + + @property + def delete_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.DeleteFirewallEndpointRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete firewall endpoint method over gRPC. + + Deletes a single Endpoint. + + Returns: + Callable[[~.DeleteFirewallEndpointRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_firewall_endpoint" not in self._stubs: + self._stubs["delete_firewall_endpoint"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/DeleteFirewallEndpoint", + request_serializer=firewall_activation.DeleteFirewallEndpointRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_firewall_endpoint"] + + @property + def update_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.UpdateFirewallEndpointRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update firewall endpoint method over gRPC. + + Update a single Endpoint. + + Returns: + Callable[[~.UpdateFirewallEndpointRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
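+        # The stub is created on first access and cached in ``self._stubs``;
+        # subsequent reads of this property reuse the same callable.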
+ if "update_firewall_endpoint" not in self._stubs: + self._stubs["update_firewall_endpoint"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/UpdateFirewallEndpoint", + request_serializer=firewall_activation.UpdateFirewallEndpointRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_firewall_endpoint"] + + @property + def list_firewall_endpoint_associations( + self, + ) -> Callable[ + [firewall_activation.ListFirewallEndpointAssociationsRequest], + firewall_activation.ListFirewallEndpointAssociationsResponse, + ]: + r"""Return a callable for the list firewall endpoint + associations method over gRPC. + + Lists Associations in a given project and location. + + Returns: + Callable[[~.ListFirewallEndpointAssociationsRequest], + ~.ListFirewallEndpointAssociationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_firewall_endpoint_associations" not in self._stubs: + self._stubs[ + "list_firewall_endpoint_associations" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/ListFirewallEndpointAssociations", + request_serializer=firewall_activation.ListFirewallEndpointAssociationsRequest.serialize, + response_deserializer=firewall_activation.ListFirewallEndpointAssociationsResponse.deserialize, + ) + return self._stubs["list_firewall_endpoint_associations"] + + @property + def get_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.GetFirewallEndpointAssociationRequest], + firewall_activation.FirewallEndpointAssociation, + ]: + r"""Return a callable for the get firewall endpoint + association method over gRPC. + + Gets details of a single FirewallEndpointAssociation. + + Returns: + Callable[[~.GetFirewallEndpointAssociationRequest], + ~.FirewallEndpointAssociation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_firewall_endpoint_association" not in self._stubs: + self._stubs[ + "get_firewall_endpoint_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/GetFirewallEndpointAssociation", + request_serializer=firewall_activation.GetFirewallEndpointAssociationRequest.serialize, + response_deserializer=firewall_activation.FirewallEndpointAssociation.deserialize, + ) + return self._stubs["get_firewall_endpoint_association"] + + @property + def create_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.CreateFirewallEndpointAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create firewall endpoint + association method over gRPC. + + Creates a new FirewallEndpointAssociation in a given + project and location. + + Returns: + Callable[[~.CreateFirewallEndpointAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_firewall_endpoint_association" not in self._stubs: + self._stubs[ + "create_firewall_endpoint_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/CreateFirewallEndpointAssociation", + request_serializer=firewall_activation.CreateFirewallEndpointAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_firewall_endpoint_association"] + + @property + def delete_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.DeleteFirewallEndpointAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete firewall endpoint + association method over gRPC. + + Deletes a single FirewallEndpointAssociation. + + Returns: + Callable[[~.DeleteFirewallEndpointAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_firewall_endpoint_association" not in self._stubs: + self._stubs[ + "delete_firewall_endpoint_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/DeleteFirewallEndpointAssociation", + request_serializer=firewall_activation.DeleteFirewallEndpointAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_firewall_endpoint_association"] + + @property + def update_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.UpdateFirewallEndpointAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update firewall endpoint + association method over gRPC. + + Update a single FirewallEndpointAssociation. + + Returns: + Callable[[~.UpdateFirewallEndpointAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_firewall_endpoint_association" not in self._stubs: + self._stubs[ + "update_firewall_endpoint_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/UpdateFirewallEndpointAssociation", + request_serializer=firewall_activation.UpdateFirewallEndpointAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_firewall_endpoint_association"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
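+ # Note: the Create/Update/Delete endpoint RPCs above return raw
+ # google.longrunning.Operation messages. The generated client wraps them
+ # in api_core operation futures, but at the transport level a caller
+ # could poll through the cached operations_client property. A hedged
+ # sketch (variable names are illustrative assumptions):
+ #
+ #   lro = transport.create_firewall_endpoint(create_request)
+ #   latest = transport.operations_client.get_operation(lro.name)
+ #   if latest.done:
+ #       ...  # inspect latest.response or latest.error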
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
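+ # Note: set_iam_policy, get_iam_policy and test_iam_permissions are the
+ # standard IAM mixin RPCs, routed to google.iam.v1.IAMPolicy rather than
+ # the FirewallActivation service itself. A hedged sketch of a permission
+ # check at the transport level (the resource name and permission string
+ # are illustrative assumptions):
+ #
+ #   response = transport.test_iam_permissions(
+ #       iam_policy_pb2.TestIamPermissionsRequest(
+ #           resource="projects/my-project/locations/us-central1/firewallEndpoints/my-endpoint",
+ #           permissions=["networksecurity.firewallEndpoints.get"],
+ #       )
+ #   )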
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("FirewallActivationGrpcTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/grpc_asyncio.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2bd7fdbb5d8d --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/grpc_asyncio.py @@ -0,0 +1,960 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import firewall_activation + +from .base import DEFAULT_CLIENT_INFO, FirewallActivationTransport +from .grpc import FirewallActivationGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + 
else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class FirewallActivationGrpcAsyncIOTransport(FirewallActivationTransport): + """gRPC AsyncIO backend transport for FirewallActivation. + + Service for managing Firewall Endpoints and Associations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_firewall_endpoints( + self, + ) -> Callable[ + [firewall_activation.ListFirewallEndpointsRequest], + Awaitable[firewall_activation.ListFirewallEndpointsResponse], + ]: + r"""Return a callable for the list firewall endpoints method over gRPC. + + Lists FirewallEndpoints in a given project and + location. + + Returns: + Callable[[~.ListFirewallEndpointsRequest], + Awaitable[~.ListFirewallEndpointsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_firewall_endpoints" not in self._stubs: + self._stubs["list_firewall_endpoints"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/ListFirewallEndpoints", + request_serializer=firewall_activation.ListFirewallEndpointsRequest.serialize, + response_deserializer=firewall_activation.ListFirewallEndpointsResponse.deserialize, + ) + return self._stubs["list_firewall_endpoints"] + + @property + def get_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.GetFirewallEndpointRequest], + Awaitable[firewall_activation.FirewallEndpoint], + ]: + r"""Return a callable for the get firewall endpoint method over gRPC. + + Gets details of a single Endpoint. + + Returns: + Callable[[~.GetFirewallEndpointRequest], + Awaitable[~.FirewallEndpoint]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_firewall_endpoint" not in self._stubs: + self._stubs["get_firewall_endpoint"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/GetFirewallEndpoint", + request_serializer=firewall_activation.GetFirewallEndpointRequest.serialize, + response_deserializer=firewall_activation.FirewallEndpoint.deserialize, + ) + return self._stubs["get_firewall_endpoint"] + + @property + def create_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.CreateFirewallEndpointRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create firewall endpoint method over gRPC. + + Creates a new FirewallEndpoint in a given project and + location. + + Returns: + Callable[[~.CreateFirewallEndpointRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
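+ # Note: unlike the synchronous transport, each callable returned here
+ # yields an awaitable gRPC call, so it must be awaited inside a running
+ # event loop. A minimal sketch (the coroutine and variable names are
+ # assumptions for illustration only):
+ #
+ #   async def make_endpoint(transport, request):
+ #       lro = await transport.create_firewall_endpoint(request)
+ #       return lro  # a raw google.longrunning.Operation message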
+ if "create_firewall_endpoint" not in self._stubs: + self._stubs["create_firewall_endpoint"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/CreateFirewallEndpoint", + request_serializer=firewall_activation.CreateFirewallEndpointRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_firewall_endpoint"] + + @property + def delete_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.DeleteFirewallEndpointRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete firewall endpoint method over gRPC. + + Deletes a single Endpoint. + + Returns: + Callable[[~.DeleteFirewallEndpointRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_firewall_endpoint" not in self._stubs: + self._stubs["delete_firewall_endpoint"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/DeleteFirewallEndpoint", + request_serializer=firewall_activation.DeleteFirewallEndpointRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_firewall_endpoint"] + + @property + def update_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.UpdateFirewallEndpointRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update firewall endpoint method over gRPC. + + Update a single Endpoint. + + Returns: + Callable[[~.UpdateFirewallEndpointRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_firewall_endpoint" not in self._stubs: + self._stubs["update_firewall_endpoint"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/UpdateFirewallEndpoint", + request_serializer=firewall_activation.UpdateFirewallEndpointRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_firewall_endpoint"] + + @property + def list_firewall_endpoint_associations( + self, + ) -> Callable[ + [firewall_activation.ListFirewallEndpointAssociationsRequest], + Awaitable[firewall_activation.ListFirewallEndpointAssociationsResponse], + ]: + r"""Return a callable for the list firewall endpoint + associations method over gRPC. + + Lists Associations in a given project and location. + + Returns: + Callable[[~.ListFirewallEndpointAssociationsRequest], + Awaitable[~.ListFirewallEndpointAssociationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_firewall_endpoint_associations" not in self._stubs: + self._stubs[ + "list_firewall_endpoint_associations" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/ListFirewallEndpointAssociations", + request_serializer=firewall_activation.ListFirewallEndpointAssociationsRequest.serialize, + response_deserializer=firewall_activation.ListFirewallEndpointAssociationsResponse.deserialize, + ) + return self._stubs["list_firewall_endpoint_associations"] + + @property + def get_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.GetFirewallEndpointAssociationRequest], + Awaitable[firewall_activation.FirewallEndpointAssociation], + ]: + r"""Return a callable for the get firewall endpoint + association method over gRPC. + + Gets details of a single FirewallEndpointAssociation. + + Returns: + Callable[[~.GetFirewallEndpointAssociationRequest], + Awaitable[~.FirewallEndpointAssociation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_firewall_endpoint_association" not in self._stubs: + self._stubs[ + "get_firewall_endpoint_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/GetFirewallEndpointAssociation", + request_serializer=firewall_activation.GetFirewallEndpointAssociationRequest.serialize, + response_deserializer=firewall_activation.FirewallEndpointAssociation.deserialize, + ) + return self._stubs["get_firewall_endpoint_association"] + + @property + def create_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.CreateFirewallEndpointAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create firewall endpoint + association method over gRPC. + + Creates a new FirewallEndpointAssociation in a given + project and location. + + Returns: + Callable[[~.CreateFirewallEndpointAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_firewall_endpoint_association" not in self._stubs: + self._stubs[ + "create_firewall_endpoint_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/CreateFirewallEndpointAssociation", + request_serializer=firewall_activation.CreateFirewallEndpointAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_firewall_endpoint_association"] + + @property + def delete_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.DeleteFirewallEndpointAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete firewall endpoint + association method over gRPC. + + Deletes a single FirewallEndpointAssociation. + + Returns: + Callable[[~.DeleteFirewallEndpointAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_firewall_endpoint_association" not in self._stubs: + self._stubs[ + "delete_firewall_endpoint_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/DeleteFirewallEndpointAssociation", + request_serializer=firewall_activation.DeleteFirewallEndpointAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_firewall_endpoint_association"] + + @property + def update_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.UpdateFirewallEndpointAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update firewall endpoint + association method over gRPC. + + Update a single FirewallEndpointAssociation. + + Returns: + Callable[[~.UpdateFirewallEndpointAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_firewall_endpoint_association" not in self._stubs: + self._stubs[ + "update_firewall_endpoint_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.FirewallActivation/UpdateFirewallEndpointAssociation", + request_serializer=firewall_activation.UpdateFirewallEndpointAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_firewall_endpoint_association"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_firewall_endpoints: self._wrap_method( + self.list_firewall_endpoints, + default_timeout=None, + client_info=client_info, + ), + self.get_firewall_endpoint: self._wrap_method( + self.get_firewall_endpoint, + default_timeout=None, + client_info=client_info, + ), + self.create_firewall_endpoint: self._wrap_method( + self.create_firewall_endpoint, + default_timeout=None, + client_info=client_info, + ), + self.delete_firewall_endpoint: self._wrap_method( + self.delete_firewall_endpoint, + default_timeout=None, + client_info=client_info, + ), + self.update_firewall_endpoint: self._wrap_method( + self.update_firewall_endpoint, + default_timeout=None, + client_info=client_info, + ), + self.list_firewall_endpoint_associations: self._wrap_method( + self.list_firewall_endpoint_associations, + default_timeout=None, + client_info=client_info, + ), + self.get_firewall_endpoint_association: self._wrap_method( + self.get_firewall_endpoint_association, + default_timeout=None, + client_info=client_info, + ), + self.create_firewall_endpoint_association: self._wrap_method( + self.create_firewall_endpoint_association, + default_timeout=None, + client_info=client_info, + ), + self.delete_firewall_endpoint_association: self._wrap_method( + self.delete_firewall_endpoint_association, + default_timeout=None, + client_info=client_info, + ), + self.update_firewall_endpoint_association: self._wrap_method( + self.update_firewall_endpoint_association, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + 
client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
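+ # Note: _prep_wrapped_messages above registers every RPC with
+ # default_timeout=None, so no retry or deadline is applied at this layer
+ # by default; callers typically set them per invocation on the generated
+ # async client. A hedged sketch, where retries is the retry_async module
+ # imported at the top of this file and the retry values are illustrative
+ # assumptions:
+ #
+ #   endpoint = await client.get_firewall_endpoint(
+ #       request=get_request,
+ #       retry=retries.AsyncRetry(initial=1.0, maximum=10.0, timeout=60.0),
+ #       timeout=30.0,
+ #   )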
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("FirewallActivationGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/rest.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/rest.py new file mode 100644 index 000000000000..cb5a16e86fc7 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/rest.py @@ -0,0 +1,3969 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.network_security_v1alpha1.types import firewall_activation + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseFirewallActivationRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class FirewallActivationRestInterceptor: + """Interceptor for FirewallActivation. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FirewallActivationRestTransport. + + .. 
code-block:: python + class MyCustomFirewallActivationInterceptor(FirewallActivationRestInterceptor): + def pre_create_firewall_endpoint(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_firewall_endpoint(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_firewall_endpoint_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_firewall_endpoint_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_firewall_endpoint(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_firewall_endpoint(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_firewall_endpoint_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_firewall_endpoint_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_firewall_endpoint(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_firewall_endpoint(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_firewall_endpoint_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_firewall_endpoint_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_firewall_endpoint_associations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_firewall_endpoint_associations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_firewall_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_firewall_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_firewall_endpoint(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_firewall_endpoint(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_firewall_endpoint_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_firewall_endpoint_association(self, response): + logging.log(f"Received response: {response}") + return response + + transport = FirewallActivationRestTransport(interceptor=MyCustomFirewallActivationInterceptor()) + client = FirewallActivationClient(transport=transport) + + + """ + + def pre_create_firewall_endpoint( + self, + request: firewall_activation.CreateFirewallEndpointRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.CreateFirewallEndpointRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_firewall_endpoint + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. 
+ """ + return request, metadata + + def post_create_firewall_endpoint( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_firewall_endpoint + + DEPRECATED. Please use the `post_create_firewall_endpoint_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. This `post_create_firewall_endpoint` interceptor runs + before the `post_create_firewall_endpoint_with_metadata` interceptor. + """ + return response + + def post_create_firewall_endpoint_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_firewall_endpoint + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallActivation server but before it is returned to user code. + + We recommend only using this `post_create_firewall_endpoint_with_metadata` + interceptor in new development instead of the `post_create_firewall_endpoint` interceptor. + When both interceptors are used, this `post_create_firewall_endpoint_with_metadata` interceptor runs after the + `post_create_firewall_endpoint` interceptor. The (possibly modified) response returned by + `post_create_firewall_endpoint` will be passed to + `post_create_firewall_endpoint_with_metadata`. + """ + return response, metadata + + def pre_create_firewall_endpoint_association( + self, + request: firewall_activation.CreateFirewallEndpointAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.CreateFirewallEndpointAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_firewall_endpoint_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_create_firewall_endpoint_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_firewall_endpoint_association + + DEPRECATED. Please use the `post_create_firewall_endpoint_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. This `post_create_firewall_endpoint_association` interceptor runs + before the `post_create_firewall_endpoint_association_with_metadata` interceptor. + """ + return response + + def post_create_firewall_endpoint_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_firewall_endpoint_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallActivation server but before it is returned to user code. + + We recommend only using this `post_create_firewall_endpoint_association_with_metadata` + interceptor in new development instead of the `post_create_firewall_endpoint_association` interceptor. 
+ When both interceptors are used, this `post_create_firewall_endpoint_association_with_metadata` interceptor runs after the + `post_create_firewall_endpoint_association` interceptor. The (possibly modified) response returned by + `post_create_firewall_endpoint_association` will be passed to + `post_create_firewall_endpoint_association_with_metadata`. + """ + return response, metadata + + def pre_delete_firewall_endpoint( + self, + request: firewall_activation.DeleteFirewallEndpointRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.DeleteFirewallEndpointRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_firewall_endpoint + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_delete_firewall_endpoint( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_firewall_endpoint + + DEPRECATED. Please use the `post_delete_firewall_endpoint_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. This `post_delete_firewall_endpoint` interceptor runs + before the `post_delete_firewall_endpoint_with_metadata` interceptor. + """ + return response + + def post_delete_firewall_endpoint_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_firewall_endpoint + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallActivation server but before it is returned to user code. + + We recommend only using this `post_delete_firewall_endpoint_with_metadata` + interceptor in new development instead of the `post_delete_firewall_endpoint` interceptor. + When both interceptors are used, this `post_delete_firewall_endpoint_with_metadata` interceptor runs after the + `post_delete_firewall_endpoint` interceptor. The (possibly modified) response returned by + `post_delete_firewall_endpoint` will be passed to + `post_delete_firewall_endpoint_with_metadata`. + """ + return response, metadata + + def pre_delete_firewall_endpoint_association( + self, + request: firewall_activation.DeleteFirewallEndpointAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.DeleteFirewallEndpointAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_firewall_endpoint_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_delete_firewall_endpoint_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_firewall_endpoint_association + + DEPRECATED. Please use the `post_delete_firewall_endpoint_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. 
This `post_delete_firewall_endpoint_association` interceptor runs + before the `post_delete_firewall_endpoint_association_with_metadata` interceptor. + """ + return response + + def post_delete_firewall_endpoint_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_firewall_endpoint_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallActivation server but before it is returned to user code. + + We recommend only using this `post_delete_firewall_endpoint_association_with_metadata` + interceptor in new development instead of the `post_delete_firewall_endpoint_association` interceptor. + When both interceptors are used, this `post_delete_firewall_endpoint_association_with_metadata` interceptor runs after the + `post_delete_firewall_endpoint_association` interceptor. The (possibly modified) response returned by + `post_delete_firewall_endpoint_association` will be passed to + `post_delete_firewall_endpoint_association_with_metadata`. + """ + return response, metadata + + def pre_get_firewall_endpoint( + self, + request: firewall_activation.GetFirewallEndpointRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.GetFirewallEndpointRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_firewall_endpoint + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_get_firewall_endpoint( + self, response: firewall_activation.FirewallEndpoint + ) -> firewall_activation.FirewallEndpoint: + """Post-rpc interceptor for get_firewall_endpoint + + DEPRECATED. Please use the `post_get_firewall_endpoint_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. This `post_get_firewall_endpoint` interceptor runs + before the `post_get_firewall_endpoint_with_metadata` interceptor. + """ + return response + + def post_get_firewall_endpoint_with_metadata( + self, + response: firewall_activation.FirewallEndpoint, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.FirewallEndpoint, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_firewall_endpoint + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallActivation server but before it is returned to user code. + + We recommend only using this `post_get_firewall_endpoint_with_metadata` + interceptor in new development instead of the `post_get_firewall_endpoint` interceptor. + When both interceptors are used, this `post_get_firewall_endpoint_with_metadata` interceptor runs after the + `post_get_firewall_endpoint` interceptor. The (possibly modified) response returned by + `post_get_firewall_endpoint` will be passed to + `post_get_firewall_endpoint_with_metadata`. 
+ """ + return response, metadata + + def pre_get_firewall_endpoint_association( + self, + request: firewall_activation.GetFirewallEndpointAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.GetFirewallEndpointAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_firewall_endpoint_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_get_firewall_endpoint_association( + self, response: firewall_activation.FirewallEndpointAssociation + ) -> firewall_activation.FirewallEndpointAssociation: + """Post-rpc interceptor for get_firewall_endpoint_association + + DEPRECATED. Please use the `post_get_firewall_endpoint_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. This `post_get_firewall_endpoint_association` interceptor runs + before the `post_get_firewall_endpoint_association_with_metadata` interceptor. + """ + return response + + def post_get_firewall_endpoint_association_with_metadata( + self, + response: firewall_activation.FirewallEndpointAssociation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.FirewallEndpointAssociation, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_firewall_endpoint_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallActivation server but before it is returned to user code. + + We recommend only using this `post_get_firewall_endpoint_association_with_metadata` + interceptor in new development instead of the `post_get_firewall_endpoint_association` interceptor. + When both interceptors are used, this `post_get_firewall_endpoint_association_with_metadata` interceptor runs after the + `post_get_firewall_endpoint_association` interceptor. The (possibly modified) response returned by + `post_get_firewall_endpoint_association` will be passed to + `post_get_firewall_endpoint_association_with_metadata`. + """ + return response, metadata + + def pre_list_firewall_endpoint_associations( + self, + request: firewall_activation.ListFirewallEndpointAssociationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.ListFirewallEndpointAssociationsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_firewall_endpoint_associations + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_list_firewall_endpoint_associations( + self, response: firewall_activation.ListFirewallEndpointAssociationsResponse + ) -> firewall_activation.ListFirewallEndpointAssociationsResponse: + """Post-rpc interceptor for list_firewall_endpoint_associations + + DEPRECATED. Please use the `post_list_firewall_endpoint_associations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. This `post_list_firewall_endpoint_associations` interceptor runs + before the `post_list_firewall_endpoint_associations_with_metadata` interceptor. 
+ """ + return response + + def post_list_firewall_endpoint_associations_with_metadata( + self, + response: firewall_activation.ListFirewallEndpointAssociationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.ListFirewallEndpointAssociationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_firewall_endpoint_associations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallActivation server but before it is returned to user code. + + We recommend only using this `post_list_firewall_endpoint_associations_with_metadata` + interceptor in new development instead of the `post_list_firewall_endpoint_associations` interceptor. + When both interceptors are used, this `post_list_firewall_endpoint_associations_with_metadata` interceptor runs after the + `post_list_firewall_endpoint_associations` interceptor. The (possibly modified) response returned by + `post_list_firewall_endpoint_associations` will be passed to + `post_list_firewall_endpoint_associations_with_metadata`. + """ + return response, metadata + + def pre_list_firewall_endpoints( + self, + request: firewall_activation.ListFirewallEndpointsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.ListFirewallEndpointsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_firewall_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_list_firewall_endpoints( + self, response: firewall_activation.ListFirewallEndpointsResponse + ) -> firewall_activation.ListFirewallEndpointsResponse: + """Post-rpc interceptor for list_firewall_endpoints + + DEPRECATED. Please use the `post_list_firewall_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. This `post_list_firewall_endpoints` interceptor runs + before the `post_list_firewall_endpoints_with_metadata` interceptor. + """ + return response + + def post_list_firewall_endpoints_with_metadata( + self, + response: firewall_activation.ListFirewallEndpointsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.ListFirewallEndpointsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_firewall_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallActivation server but before it is returned to user code. + + We recommend only using this `post_list_firewall_endpoints_with_metadata` + interceptor in new development instead of the `post_list_firewall_endpoints` interceptor. + When both interceptors are used, this `post_list_firewall_endpoints_with_metadata` interceptor runs after the + `post_list_firewall_endpoints` interceptor. The (possibly modified) response returned by + `post_list_firewall_endpoints` will be passed to + `post_list_firewall_endpoints_with_metadata`. 
+ """ + return response, metadata + + def pre_update_firewall_endpoint( + self, + request: firewall_activation.UpdateFirewallEndpointRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.UpdateFirewallEndpointRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_firewall_endpoint + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_update_firewall_endpoint( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_firewall_endpoint + + DEPRECATED. Please use the `post_update_firewall_endpoint_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. This `post_update_firewall_endpoint` interceptor runs + before the `post_update_firewall_endpoint_with_metadata` interceptor. + """ + return response + + def post_update_firewall_endpoint_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_firewall_endpoint + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallActivation server but before it is returned to user code. + + We recommend only using this `post_update_firewall_endpoint_with_metadata` + interceptor in new development instead of the `post_update_firewall_endpoint` interceptor. + When both interceptors are used, this `post_update_firewall_endpoint_with_metadata` interceptor runs after the + `post_update_firewall_endpoint` interceptor. The (possibly modified) response returned by + `post_update_firewall_endpoint` will be passed to + `post_update_firewall_endpoint_with_metadata`. + """ + return response, metadata + + def pre_update_firewall_endpoint_association( + self, + request: firewall_activation.UpdateFirewallEndpointAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firewall_activation.UpdateFirewallEndpointAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_firewall_endpoint_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_update_firewall_endpoint_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_firewall_endpoint_association + + DEPRECATED. Please use the `post_update_firewall_endpoint_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. This `post_update_firewall_endpoint_association` interceptor runs + before the `post_update_firewall_endpoint_association_with_metadata` interceptor. 
+ """ + return response + + def post_update_firewall_endpoint_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_firewall_endpoint_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallActivation server but before it is returned to user code. + + We recommend only using this `post_update_firewall_endpoint_association_with_metadata` + interceptor in new development instead of the `post_update_firewall_endpoint_association` interceptor. + When both interceptors are used, this `post_update_firewall_endpoint_association_with_metadata` interceptor runs after the + `post_update_firewall_endpoint_association` interceptor. The (possibly modified) response returned by + `post_update_firewall_endpoint_association` will be passed to + `post_update_firewall_endpoint_association_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. 
+ """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallActivation server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the FirewallActivation server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class FirewallActivationRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FirewallActivationRestInterceptor + + +class FirewallActivationRestTransport(_BaseFirewallActivationRestTransport): + """REST backend synchronous transport for FirewallActivation. + + Service for managing Firewall Endpoints and Associations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[FirewallActivationRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or FirewallActivationRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
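        # Editor's note (descriptive, not part of the generated patch): the HTTP bindings
        # registered above cover both project- and organization-scoped operation names
        # (projects/*/locations/*/operations/* and organizations/*/locations/*/operations/*),
        # and the AbstractOperationsClient built from them is cached on the transport, so
        # every long-running RPC on this service (create/update/delete of endpoints and
        # associations) appears to poll through this one shared client.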
+ return self._operations_client + + class _CreateFirewallEndpoint( + _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpoint, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.CreateFirewallEndpoint") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: firewall_activation.CreateFirewallEndpointRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create firewall endpoint method over HTTP. + + Args: + request (~.firewall_activation.CreateFirewallEndpointRequest): + The request object. Message for creating a Endpoint + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpoint._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_firewall_endpoint( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpoint._get_transcoded_request( + http_options, request + ) + + body = _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpoint._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpoint._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.CreateFirewallEndpoint", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "CreateFirewallEndpoint", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + FirewallActivationRestTransport._CreateFirewallEndpoint._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_firewall_endpoint(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_firewall_endpoint_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.create_firewall_endpoint", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "CreateFirewallEndpoint", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateFirewallEndpointAssociation( + _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpointAssociation, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash( + "FirewallActivationRestTransport.CreateFirewallEndpointAssociation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + 
timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: firewall_activation.CreateFirewallEndpointAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create firewall endpoint + association method over HTTP. + + Args: + request (~.firewall_activation.CreateFirewallEndpointAssociationRequest): + The request object. Message for creating a Association + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpointAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_create_firewall_endpoint_association( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpointAssociation._get_transcoded_request( + http_options, request + ) + + body = _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpointAssociation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpointAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.CreateFirewallEndpointAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "CreateFirewallEndpointAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._CreateFirewallEndpointAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
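            # Editor's note (illustrative sketch, not part of the generated patch):
            # core_exceptions.from_http_response selects the google.api_core.exceptions
            # subclass matching the HTTP status (for example 404 -> NotFound,
            # 403 -> PermissionDenied), so callers of the public client can catch
            # specific error types rather than a generic failure:
            #
            #     from google.api_core import exceptions as core_exceptions
            #
            #     try:
            #         operation = client.create_firewall_endpoint_association(request=request)
            #     except core_exceptions.PermissionDenied:
            #         ...  # caller lacks permission on the parent resource
            #
            # Here `client` and `request` are assumed to be a FirewallActivationClient and a
            # CreateFirewallEndpointAssociationRequest constructed elsewhere.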
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_firewall_endpoint_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_firewall_endpoint_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.create_firewall_endpoint_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "CreateFirewallEndpointAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteFirewallEndpoint( + _BaseFirewallActivationRestTransport._BaseDeleteFirewallEndpoint, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.DeleteFirewallEndpoint") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: firewall_activation.DeleteFirewallEndpointRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete firewall endpoint method over HTTP. + + Args: + request (~.firewall_activation.DeleteFirewallEndpointRequest): + The request object. Message for deleting a Endpoint + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseDeleteFirewallEndpoint._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_firewall_endpoint( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseDeleteFirewallEndpoint._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseDeleteFirewallEndpoint._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.DeleteFirewallEndpoint", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "DeleteFirewallEndpoint", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + FirewallActivationRestTransport._DeleteFirewallEndpoint._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_firewall_endpoint(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_firewall_endpoint_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.delete_firewall_endpoint", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "DeleteFirewallEndpoint", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteFirewallEndpointAssociation( + _BaseFirewallActivationRestTransport._BaseDeleteFirewallEndpointAssociation, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash( + "FirewallActivationRestTransport.DeleteFirewallEndpointAssociation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def 
__call__( + self, + request: firewall_activation.DeleteFirewallEndpointAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete firewall endpoint + association method over HTTP. + + Args: + request (~.firewall_activation.DeleteFirewallEndpointAssociationRequest): + The request object. Message for deleting a Association + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseDeleteFirewallEndpointAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_delete_firewall_endpoint_association( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseDeleteFirewallEndpointAssociation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseDeleteFirewallEndpointAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.DeleteFirewallEndpointAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "DeleteFirewallEndpointAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._DeleteFirewallEndpointAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
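            # Editor's note (illustrative sketch, not part of the generated patch): at the
            # transport layer this RPC yields only the raw longrunning Operation proto; the
            # public FirewallActivationClient wraps it in an operation future, so a typical
            # caller simply waits on the result:
            #
            #     op = client.delete_firewall_endpoint_association(request=request)
            #     op.result()  # blocks until the long-running delete completes
            #
            # `client` and `request` are assumed to be a FirewallActivationClient and a
            # DeleteFirewallEndpointAssociationRequest constructed elsewhere.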
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_firewall_endpoint_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_firewall_endpoint_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.delete_firewall_endpoint_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "DeleteFirewallEndpointAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetFirewallEndpoint( + _BaseFirewallActivationRestTransport._BaseGetFirewallEndpoint, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.GetFirewallEndpoint") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: firewall_activation.GetFirewallEndpointRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> firewall_activation.FirewallEndpoint: + r"""Call the get firewall endpoint method over HTTP. + + Args: + request (~.firewall_activation.GetFirewallEndpointRequest): + The request object. Message for getting a Endpoint + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.firewall_activation.FirewallEndpoint: + Message describing Endpoint object. 
+ """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseGetFirewallEndpoint._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_firewall_endpoint( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseGetFirewallEndpoint._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseGetFirewallEndpoint._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.GetFirewallEndpoint", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "GetFirewallEndpoint", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + FirewallActivationRestTransport._GetFirewallEndpoint._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firewall_activation.FirewallEndpoint() + pb_resp = firewall_activation.FirewallEndpoint.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_firewall_endpoint(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_firewall_endpoint_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firewall_activation.FirewallEndpoint.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.get_firewall_endpoint", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "GetFirewallEndpoint", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetFirewallEndpointAssociation( + _BaseFirewallActivationRestTransport._BaseGetFirewallEndpointAssociation, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash( + "FirewallActivationRestTransport.GetFirewallEndpointAssociation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: firewall_activation.GetFirewallEndpointAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> firewall_activation.FirewallEndpointAssociation: + r"""Call the get firewall endpoint + association method over HTTP. + + Args: + request (~.firewall_activation.GetFirewallEndpointAssociationRequest): + The request object. Message for getting a Association + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.firewall_activation.FirewallEndpointAssociation: + Message describing Association object + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseGetFirewallEndpointAssociation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_firewall_endpoint_association( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseGetFirewallEndpointAssociation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseGetFirewallEndpointAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.GetFirewallEndpointAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "GetFirewallEndpointAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._GetFirewallEndpointAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
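            # Editor's note (descriptive, not part of the generated patch): after the status
            # check below, FirewallEndpointAssociation.pb(resp) exposes the raw protobuf
            # message underlying the proto-plus wrapper, json_format.Parse fills that message
            # in place, and `resp` (the wrapper around that same message) is then handed to
            # the post-RPC interceptors.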
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firewall_activation.FirewallEndpointAssociation() + pb_resp = firewall_activation.FirewallEndpointAssociation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_firewall_endpoint_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_firewall_endpoint_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + firewall_activation.FirewallEndpointAssociation.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.get_firewall_endpoint_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "GetFirewallEndpointAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListFirewallEndpointAssociations( + _BaseFirewallActivationRestTransport._BaseListFirewallEndpointAssociations, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash( + "FirewallActivationRestTransport.ListFirewallEndpointAssociations" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: firewall_activation.ListFirewallEndpointAssociationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> firewall_activation.ListFirewallEndpointAssociationsResponse: + r"""Call the list firewall endpoint + associations method over HTTP. + + Args: + request (~.firewall_activation.ListFirewallEndpointAssociationsRequest): + The request object. Message for requesting list of + Associations + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.firewall_activation.ListFirewallEndpointAssociationsResponse: + Message for response to listing + Associations + + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseListFirewallEndpointAssociations._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_list_firewall_endpoint_associations( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseListFirewallEndpointAssociations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseListFirewallEndpointAssociations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.ListFirewallEndpointAssociations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "ListFirewallEndpointAssociations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._ListFirewallEndpointAssociations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firewall_activation.ListFirewallEndpointAssociationsResponse() + pb_resp = firewall_activation.ListFirewallEndpointAssociationsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_firewall_endpoint_associations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_firewall_endpoint_associations_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = firewall_activation.ListFirewallEndpointAssociationsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.list_firewall_endpoint_associations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "ListFirewallEndpointAssociations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListFirewallEndpoints( + _BaseFirewallActivationRestTransport._BaseListFirewallEndpoints, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.ListFirewallEndpoints") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: firewall_activation.ListFirewallEndpointsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> firewall_activation.ListFirewallEndpointsResponse: + r"""Call the list firewall endpoints method over HTTP. + + Args: + request (~.firewall_activation.ListFirewallEndpointsRequest): + The request object. Message for requesting list of + Endpoints + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.firewall_activation.ListFirewallEndpointsResponse: + Message for response to listing + Endpoints + + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseListFirewallEndpoints._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_firewall_endpoints( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseListFirewallEndpoints._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseListFirewallEndpoints._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.ListFirewallEndpoints", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "ListFirewallEndpoints", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + FirewallActivationRestTransport._ListFirewallEndpoints._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = firewall_activation.ListFirewallEndpointsResponse() + pb_resp = firewall_activation.ListFirewallEndpointsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_firewall_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_firewall_endpoints_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + firewall_activation.ListFirewallEndpointsResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.list_firewall_endpoints", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "ListFirewallEndpoints", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateFirewallEndpoint( + _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpoint, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.UpdateFirewallEndpoint") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = 
getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: firewall_activation.UpdateFirewallEndpointRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update firewall endpoint method over HTTP. + + Args: + request (~.firewall_activation.UpdateFirewallEndpointRequest): + The request object. Message for updating a Endpoint + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpoint._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_firewall_endpoint( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpoint._get_transcoded_request( + http_options, request + ) + + body = _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpoint._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpoint._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.UpdateFirewallEndpoint", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "UpdateFirewallEndpoint", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + FirewallActivationRestTransport._UpdateFirewallEndpoint._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
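+ # Note: from_http_response inspects the HTTP status code and returns the
+ # matching google.api_core.exceptions subclass (for example 404 -> NotFound,
+ # 429 -> TooManyRequests), so callers can catch a specific error type.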
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_firewall_endpoint(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_firewall_endpoint_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.update_firewall_endpoint", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "UpdateFirewallEndpoint", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateFirewallEndpointAssociation( + _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpointAssociation, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash( + "FirewallActivationRestTransport.UpdateFirewallEndpointAssociation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: firewall_activation.UpdateFirewallEndpointAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update firewall endpoint + association method over HTTP. + + Args: + request (~.firewall_activation.UpdateFirewallEndpointAssociationRequest): + The request object. Message for updating an Association + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpointAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_update_firewall_endpoint_association( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpointAssociation._get_transcoded_request( + http_options, request + ) + + body = _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpointAssociation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpointAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.UpdateFirewallEndpointAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "UpdateFirewallEndpointAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._UpdateFirewallEndpointAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_firewall_endpoint_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_firewall_endpoint_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.update_firewall_endpoint_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "UpdateFirewallEndpointAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.CreateFirewallEndpointRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateFirewallEndpoint(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.CreateFirewallEndpointAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateFirewallEndpointAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.DeleteFirewallEndpointRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteFirewallEndpoint(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.DeleteFirewallEndpointAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteFirewallEndpointAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.GetFirewallEndpointRequest], + firewall_activation.FirewallEndpoint, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetFirewallEndpoint(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.GetFirewallEndpointAssociationRequest], + firewall_activation.FirewallEndpointAssociation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetFirewallEndpointAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_firewall_endpoint_associations( + self, + ) -> Callable[ + [firewall_activation.ListFirewallEndpointAssociationsRequest], + firewall_activation.ListFirewallEndpointAssociationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListFirewallEndpointAssociations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_firewall_endpoints( + self, + ) -> Callable[ + [firewall_activation.ListFirewallEndpointsRequest], + firewall_activation.ListFirewallEndpointsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListFirewallEndpoints(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_firewall_endpoint( + self, + ) -> Callable[ + [firewall_activation.UpdateFirewallEndpointRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateFirewallEndpoint(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_firewall_endpoint_association( + self, + ) -> Callable[ + [firewall_activation.UpdateFirewallEndpointAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateFirewallEndpointAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BaseFirewallActivationRestTransport._BaseGetLocation, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseFirewallActivationRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseFirewallActivationRestTransport._BaseListLocations, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: 
+ r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseFirewallActivationRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseFirewallActivationRestTransport._BaseGetIamPolicy, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. 
+ """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseFirewallActivationRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseFirewallActivationRestTransport._BaseSetIamPolicy, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + 
r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseFirewallActivationRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseFirewallActivationRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseFirewallActivationRestTransport._BaseTestIamPermissions, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseFirewallActivationRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + FirewallActivationRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseFirewallActivationRestTransport._BaseCancelOperation, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def 
__call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseFirewallActivationRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.CancelOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseFirewallActivationRestTransport._BaseDeleteOperation, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseFirewallActivationRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseFirewallActivationRestTransport._BaseGetOperation, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseFirewallActivationRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseFirewallActivationRestTransport._BaseListOperations, + FirewallActivationRestStub, + ): + def __hash__(self): + return hash("FirewallActivationRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = ( + _BaseFirewallActivationRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseFirewallActivationRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFirewallActivationRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.FirewallActivationClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirewallActivationRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.FirewallActivationAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("FirewallActivationRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/rest_base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/rest_base.py new file mode 100644 index 000000000000..067e68f72077 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/firewall_activation/transports/rest_base.py @@ -0,0 +1,959 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.network_security_v1alpha1.types import firewall_activation + +from .base import DEFAULT_CLIENT_INFO, FirewallActivationTransport + + +class _BaseFirewallActivationRestTransport(FirewallActivationTransport): + """Base REST backend transport for FirewallActivation. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateFirewallEndpoint: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "firewallEndpointId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=organizations/*/locations/*}/firewallEndpoints", + "body": "firewall_endpoint", + }, + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/firewallEndpoints", + "body": "firewall_endpoint", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firewall_activation.CreateFirewallEndpointRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirewallActivationRestTransport._BaseCreateFirewallEndpoint._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateFirewallEndpointAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/firewallEndpointAssociations", + "body": "firewall_endpoint_association", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + firewall_activation.CreateFirewallEndpointAssociationRequest.pb(request) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseFirewallActivationRestTransport._BaseCreateFirewallEndpointAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteFirewallEndpoint: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/firewallEndpoints/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/firewallEndpoints/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firewall_activation.DeleteFirewallEndpointRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirewallActivationRestTransport._BaseDeleteFirewallEndpoint._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteFirewallEndpointAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/firewallEndpointAssociations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + firewall_activation.DeleteFirewallEndpointAssociationRequest.pb(request) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirewallActivationRestTransport._BaseDeleteFirewallEndpointAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetFirewallEndpoint: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/firewallEndpoints/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/firewallEndpoints/*}", + }, + ] + 
return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firewall_activation.GetFirewallEndpointRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirewallActivationRestTransport._BaseGetFirewallEndpoint._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetFirewallEndpointAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/firewallEndpointAssociations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firewall_activation.GetFirewallEndpointAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirewallActivationRestTransport._BaseGetFirewallEndpointAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListFirewallEndpointAssociations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/firewallEndpointAssociations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firewall_activation.ListFirewallEndpointAssociationsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirewallActivationRestTransport._BaseListFirewallEndpointAssociations._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListFirewallEndpoints: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): 
+ return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=organizations/*/locations/*}/firewallEndpoints", + }, + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/firewallEndpoints", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firewall_activation.ListFirewallEndpointsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirewallActivationRestTransport._BaseListFirewallEndpoints._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateFirewallEndpoint: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{firewall_endpoint.name=organizations/*/locations/*/firewallEndpoints/*}", + "body": "firewall_endpoint", + }, + { + "method": "patch", + "uri": "/v1alpha1/{firewall_endpoint.name=projects/*/locations/*/firewallEndpoints/*}", + "body": "firewall_endpoint", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firewall_activation.UpdateFirewallEndpointRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpoint._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateFirewallEndpointAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{firewall_endpoint_association.name=projects/*/locations/*/firewallEndpointAssociations/*}", + "body": "firewall_endpoint_association", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = ( + firewall_activation.UpdateFirewallEndpointAssociationRequest.pb(request) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirewallActivationRestTransport._BaseUpdateFirewallEndpointAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + 
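+    # Illustrative sketch only (not generated output; `request`, `session`, and `host`
+    # below are hypothetical placeholders): the concrete REST transport in rest.py,
+    # shown earlier in this patch, composes these _Base* helpers for each RPC roughly
+    # as follows before issuing the HTTP call through its session.
+    #
+    #   opts = _BaseFirewallActivationRestTransport._BaseGetIamPolicy._get_http_options()
+    #   transcoded = _BaseFirewallActivationRestTransport._BaseGetIamPolicy._get_transcoded_request(opts, request)
+    #   params = _BaseFirewallActivationRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded)
+    #   # The concrete transport then dispatches on the transcoded method/uri
+    #   # (simplified here; see _get_response/__call__ in rest.py for the full flow):
+    #   response = getattr(session, transcoded["method"])(host + transcoded["uri"], params=params)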
+ class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = 
path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseFirewallActivationRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/__init__.py new file mode 100644 index 000000000000..61c7eb8d193c --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import InterceptAsyncClient +from .client import InterceptClient + +__all__ = ( + "InterceptClient", + "InterceptAsyncClient", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/async_client.py new file mode 100644 index 000000000000..c7a1f997f435 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/async_client.py @@ -0,0 +1,3811 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.intercept import pagers +from google.cloud.network_security_v1alpha1.types import common, intercept + +from .client import InterceptClient +from .transports.base import DEFAULT_CLIENT_INFO, InterceptTransport +from .transports.grpc_asyncio import InterceptGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + 
CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class InterceptAsyncClient: + """Service for Third-Party Packet Intercept (TPPI). + TPPI is the "in-band" flavor of the Network Security + Integrations product. + """ + + _client: InterceptClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = InterceptClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = InterceptClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = InterceptClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = InterceptClient._DEFAULT_UNIVERSE + + forwarding_rule_path = staticmethod(InterceptClient.forwarding_rule_path) + parse_forwarding_rule_path = staticmethod( + InterceptClient.parse_forwarding_rule_path + ) + intercept_deployment_path = staticmethod(InterceptClient.intercept_deployment_path) + parse_intercept_deployment_path = staticmethod( + InterceptClient.parse_intercept_deployment_path + ) + intercept_deployment_group_path = staticmethod( + InterceptClient.intercept_deployment_group_path + ) + parse_intercept_deployment_group_path = staticmethod( + InterceptClient.parse_intercept_deployment_group_path + ) + intercept_endpoint_group_path = staticmethod( + InterceptClient.intercept_endpoint_group_path + ) + parse_intercept_endpoint_group_path = staticmethod( + InterceptClient.parse_intercept_endpoint_group_path + ) + intercept_endpoint_group_association_path = staticmethod( + InterceptClient.intercept_endpoint_group_association_path + ) + parse_intercept_endpoint_group_association_path = staticmethod( + InterceptClient.parse_intercept_endpoint_group_association_path + ) + network_path = staticmethod(InterceptClient.network_path) + parse_network_path = staticmethod(InterceptClient.parse_network_path) + common_billing_account_path = staticmethod( + InterceptClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + InterceptClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(InterceptClient.common_folder_path) + parse_common_folder_path = staticmethod(InterceptClient.parse_common_folder_path) + common_organization_path = staticmethod(InterceptClient.common_organization_path) + parse_common_organization_path = staticmethod( + InterceptClient.parse_common_organization_path + ) + common_project_path = staticmethod(InterceptClient.common_project_path) + parse_common_project_path = staticmethod(InterceptClient.parse_common_project_path) + common_location_path = staticmethod(InterceptClient.common_location_path) + parse_common_location_path = staticmethod( + InterceptClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InterceptAsyncClient: The constructed client. + """ + return InterceptClient.from_service_account_info.__func__(InterceptAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InterceptAsyncClient: The constructed client. + """ + return InterceptClient.from_service_account_file.__func__(InterceptAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return InterceptClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> InterceptTransport: + """Returns the transport used by the client instance. + + Returns: + InterceptTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = InterceptClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, InterceptTransport, Callable[..., InterceptTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the intercept async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,InterceptTransport,Callable[..., InterceptTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the InterceptTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = InterceptClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.InterceptAsyncClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "credentialsType": None, + }, + ) + + async def list_intercept_endpoint_groups( + self, + request: Optional[ + Union[intercept.ListInterceptEndpointGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInterceptEndpointGroupsAsyncPager: + r"""Lists endpoint groups in a given project and + location. See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_intercept_endpoint_groups(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptEndpointGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_endpoint_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsRequest, dict]]): + The request object. Request message for + ListInterceptEndpointGroups. + parent (:class:`str`): + Required. The parent, which owns this collection of + endpoint groups. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptEndpointGroupsAsyncPager: + Response message for + ListInterceptEndpointGroups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.ListInterceptEndpointGroupsRequest): + request = intercept.ListInterceptEndpointGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_intercept_endpoint_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInterceptEndpointGroupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_intercept_endpoint_group( + self, + request: Optional[ + Union[intercept.GetInterceptEndpointGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptEndpointGroup: + r"""Gets a specific endpoint group. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptEndpointGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intercept_endpoint_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetInterceptEndpointGroupRequest, dict]]): + The request object. Request message for + GetInterceptEndpointGroup. + name (:class:`str`): + Required. The name of the endpoint group to retrieve. + Format: + projects/{project}/locations/{location}/interceptEndpointGroups/{intercept_endpoint_group} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup: + An endpoint group is a consumer + frontend for a deployment group + (backend). In order to configure + intercept for a network, consumers must + create: + + - An association between their network + and the endpoint group. + - A security profile that points to the + endpoint group. + - A firewall rule that references the + security profile (group). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.GetInterceptEndpointGroupRequest): + request = intercept.GetInterceptEndpointGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_intercept_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_intercept_endpoint_group( + self, + request: Optional[ + Union[intercept.CreateInterceptEndpointGroupRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + intercept_endpoint_group: Optional[intercept.InterceptEndpointGroup] = None, + intercept_endpoint_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an endpoint group in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_endpoint_group = network_security_v1alpha1.InterceptEndpointGroup() + intercept_endpoint_group.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.CreateInterceptEndpointGroupRequest( + parent="parent_value", + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + intercept_endpoint_group=intercept_endpoint_group, + ) + + # Make the request + operation = client.create_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateInterceptEndpointGroupRequest, dict]]): + The request object. Request message for + CreateInterceptEndpointGroup. + parent (:class:`str`): + Required. 
The parent resource where + this endpoint group will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_endpoint_group (:class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup`): + Required. The endpoint group to + create. + + This corresponds to the ``intercept_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_endpoint_group_id (:class:`str`): + Required. The ID to use for the + endpoint group, which will become the + final component of the endpoint group's + resource name. + + This corresponds to the ``intercept_endpoint_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup` An endpoint group is a consumer frontend for a deployment group (backend). + In order to configure intercept for a network, + consumers must create: - An association between their + network and the endpoint group. - A security profile + that points to the endpoint group. - A firewall rule + that references the security profile (group). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + intercept_endpoint_group, + intercept_endpoint_group_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.CreateInterceptEndpointGroupRequest): + request = intercept.CreateInterceptEndpointGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if intercept_endpoint_group is not None: + request.intercept_endpoint_group = intercept_endpoint_group + if intercept_endpoint_group_id is not None: + request.intercept_endpoint_group_id = intercept_endpoint_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_intercept_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intercept.InterceptEndpointGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_intercept_endpoint_group( + self, + request: Optional[ + Union[intercept.UpdateInterceptEndpointGroupRequest, dict] + ] = None, + *, + intercept_endpoint_group: Optional[intercept.InterceptEndpointGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an endpoint group. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_endpoint_group = network_security_v1alpha1.InterceptEndpointGroup() + intercept_endpoint_group.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.UpdateInterceptEndpointGroupRequest( + intercept_endpoint_group=intercept_endpoint_group, + ) + + # Make the request + operation = client.update_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateInterceptEndpointGroupRequest, dict]]): + The request object. Request message for + UpdateInterceptEndpointGroup. + intercept_endpoint_group (:class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup`): + Required. The endpoint group to + update. + + This corresponds to the ``intercept_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to update. Fields are + specified relative to the endpoint group (e.g. + ``description``; *not* + ``intercept_endpoint_group.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup` An endpoint group is a consumer frontend for a deployment group (backend). + In order to configure intercept for a network, + consumers must create: - An association between their + network and the endpoint group. - A security profile + that points to the endpoint group. - A firewall rule + that references the security profile (group). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [intercept_endpoint_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.UpdateInterceptEndpointGroupRequest): + request = intercept.UpdateInterceptEndpointGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if intercept_endpoint_group is not None: + request.intercept_endpoint_group = intercept_endpoint_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_intercept_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "intercept_endpoint_group.name", + request.intercept_endpoint_group.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intercept.InterceptEndpointGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_intercept_endpoint_group( + self, + request: Optional[ + Union[intercept.DeleteInterceptEndpointGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an endpoint group. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptEndpointGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteInterceptEndpointGroupRequest, dict]]): + The request object. Request message for + DeleteInterceptEndpointGroup. + name (:class:`str`): + Required. The endpoint group to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.DeleteInterceptEndpointGroupRequest): + request = intercept.DeleteInterceptEndpointGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_intercept_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
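+        # (Editorial note, illustrative sketch only): `rpc` resolves to the raw
+        # google.longrunning Operation proto, which the block further below
+        # wraps into an AsyncOperation whose final result type is
+        # empty_pb2.Empty. Assuming google.api_core's AsyncOperation interface,
+        # a caller could await completion roughly like this (placeholder name):
+        #
+        #     operation = await client.delete_intercept_endpoint_group(name="name_value")
+        #     await operation.result()  # resolves once the deletion finishes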
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_intercept_endpoint_group_associations( + self, + request: Optional[ + Union[intercept.ListInterceptEndpointGroupAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInterceptEndpointGroupAssociationsAsyncPager: + r"""Lists associations in a given project and location. + See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_intercept_endpoint_group_associations(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptEndpointGroupAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_endpoint_group_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsRequest, dict]]): + The request object. Request message for + ListInterceptEndpointGroupAssociations. + parent (:class:`str`): + Required. The parent, which owns this collection of + associations. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptEndpointGroupAssociationsAsyncPager: + Response message for + ListInterceptEndpointGroupAssociations. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
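+        # (Editorial note, hedged sketch): `request` and the flattened `parent`
+        # argument are mutually exclusive. Illustrative call forms (placeholder
+        # values) would be either
+        #
+        #     pager = await client.list_intercept_endpoint_group_associations(
+        #         parent="projects/123456789/locations/global",
+        #     )
+        #
+        # or the equivalent with `request=...`, but not both at once, which
+        # raises the ValueError constructed below.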
+ flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, intercept.ListInterceptEndpointGroupAssociationsRequest + ): + request = intercept.ListInterceptEndpointGroupAssociationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_intercept_endpoint_group_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInterceptEndpointGroupAssociationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_intercept_endpoint_group_association( + self, + request: Optional[ + Union[intercept.GetInterceptEndpointGroupAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptEndpointGroupAssociation: + r"""Gets a specific association. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intercept_endpoint_group_association(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetInterceptEndpointGroupAssociationRequest, dict]]): + The request object. Request message for + GetInterceptEndpointGroupAssociation. + name (:class:`str`): + Required. The name of the association to retrieve. 
+ Format: + projects/{project}/locations/{location}/interceptEndpointGroupAssociations/{intercept_endpoint_group_association} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation: + An endpoint group association + represents a link between a network and + an endpoint group in the organization. + + Creating an association creates the + networking infrastructure linking the + network to the endpoint group, but does + not enable intercept by itself. To + enable intercept, the user must also + create a network firewall policy + containing intercept rules and associate + it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, intercept.GetInterceptEndpointGroupAssociationRequest + ): + request = intercept.GetInterceptEndpointGroupAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_intercept_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_intercept_endpoint_group_association( + self, + request: Optional[ + Union[intercept.CreateInterceptEndpointGroupAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + intercept_endpoint_group_association: Optional[ + intercept.InterceptEndpointGroupAssociation + ] = None, + intercept_endpoint_group_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an association in a given project and + location. See https://google.aip.dev/133. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_endpoint_group_association = network_security_v1alpha1.InterceptEndpointGroupAssociation() + intercept_endpoint_group_association.intercept_endpoint_group = "intercept_endpoint_group_value" + intercept_endpoint_group_association.network = "network_value" + + request = network_security_v1alpha1.CreateInterceptEndpointGroupAssociationRequest( + parent="parent_value", + intercept_endpoint_group_association=intercept_endpoint_group_association, + ) + + # Make the request + operation = client.create_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateInterceptEndpointGroupAssociationRequest, dict]]): + The request object. Request message for + CreateInterceptEndpointGroupAssociation. + parent (:class:`str`): + Required. The parent resource where + this association will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_endpoint_group_association (:class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation`): + Required. The association to create. + This corresponds to the ``intercept_endpoint_group_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_endpoint_group_association_id (:class:`str`): + Optional. The ID to use for the new + association, which will become the final + component of the endpoint group's + resource name. If not provided, the + server will generate a unique ID. + + This corresponds to the ``intercept_endpoint_group_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation` An endpoint group association represents a link between a network and an + endpoint group in the organization. + + Creating an association creates the networking + infrastructure linking the network to the endpoint + group, but does not enable intercept by itself. 
To + enable intercept, the user must also create a network + firewall policy containing intercept rules and + associate it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + intercept_endpoint_group_association, + intercept_endpoint_group_association_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, intercept.CreateInterceptEndpointGroupAssociationRequest + ): + request = intercept.CreateInterceptEndpointGroupAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if intercept_endpoint_group_association is not None: + request.intercept_endpoint_group_association = ( + intercept_endpoint_group_association + ) + if intercept_endpoint_group_association_id is not None: + request.intercept_endpoint_group_association_id = ( + intercept_endpoint_group_association_id + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_intercept_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intercept.InterceptEndpointGroupAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_intercept_endpoint_group_association( + self, + request: Optional[ + Union[intercept.UpdateInterceptEndpointGroupAssociationRequest, dict] + ] = None, + *, + intercept_endpoint_group_association: Optional[ + intercept.InterceptEndpointGroupAssociation + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an association. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_endpoint_group_association = network_security_v1alpha1.InterceptEndpointGroupAssociation() + intercept_endpoint_group_association.intercept_endpoint_group = "intercept_endpoint_group_value" + intercept_endpoint_group_association.network = "network_value" + + request = network_security_v1alpha1.UpdateInterceptEndpointGroupAssociationRequest( + intercept_endpoint_group_association=intercept_endpoint_group_association, + ) + + # Make the request + operation = client.update_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateInterceptEndpointGroupAssociationRequest, dict]]): + The request object. Request message for + UpdateInterceptEndpointGroupAssociation. + intercept_endpoint_group_association (:class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation`): + Required. The association to update. + This corresponds to the ``intercept_endpoint_group_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to update. Fields are + specified relative to the association (e.g. + ``description``; *not* + ``intercept_endpoint_group_association.description``). + See https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation` An endpoint group association represents a link between a network and an + endpoint group in the organization. + + Creating an association creates the networking + infrastructure linking the network to the endpoint + group, but does not enable intercept by itself. To + enable intercept, the user must also create a network + firewall policy containing intercept rules and + associate it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
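+        # (Editorial note, hedged): the routing-header block further below
+        # serializes ("intercept_endpoint_group_association.name", <name>) into
+        # the x-goog-request-params gRPC metadata entry so the backend can
+        # route the call to the right resource; this is standard
+        # gapic_v1.routing_header behavior rather than anything specific to
+        # this RPC.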
+ flattened_params = [intercept_endpoint_group_association, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, intercept.UpdateInterceptEndpointGroupAssociationRequest + ): + request = intercept.UpdateInterceptEndpointGroupAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if intercept_endpoint_group_association is not None: + request.intercept_endpoint_group_association = ( + intercept_endpoint_group_association + ) + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_intercept_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "intercept_endpoint_group_association.name", + request.intercept_endpoint_group_association.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intercept.InterceptEndpointGroupAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_intercept_endpoint_group_association( + self, + request: Optional[ + Union[intercept.DeleteInterceptEndpointGroupAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an association. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteInterceptEndpointGroupAssociationRequest, dict]]): + The request object. Request message for + DeleteInterceptEndpointGroupAssociation. + name (:class:`str`): + Required. The association to delete. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, intercept.DeleteInterceptEndpointGroupAssociationRequest + ): + request = intercept.DeleteInterceptEndpointGroupAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_intercept_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
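+        # (Editorial note, hedged): in the operation wrapping a little further
+        # below, metadata_type=common.OperationMetadata governs how the
+        # long-running operation's progress metadata is decoded, while the
+        # final result decodes to empty_pb2.Empty, matching the Returns section
+        # of the docstring above.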
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_intercept_deployment_groups( + self, + request: Optional[ + Union[intercept.ListInterceptDeploymentGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInterceptDeploymentGroupsAsyncPager: + r"""Lists deployment groups in a given project and + location. See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_intercept_deployment_groups(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_deployment_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsRequest, dict]]): + The request object. Request message for + ListInterceptDeploymentGroups. + parent (:class:`str`): + Required. The parent, which owns this collection of + deployment groups. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptDeploymentGroupsAsyncPager: + Response message for + ListInterceptDeploymentGroups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
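+        # (Editorial note, hedged): the `_wrapped_methods` lookup further below
+        # returns the transport method already wrapped by gapic_v1 with this
+        # RPC's default retry and timeout configuration; the `retry` and
+        # `timeout` arguments accepted by this coroutine override those
+        # defaults on a per-call basis.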
+ flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.ListInterceptDeploymentGroupsRequest): + request = intercept.ListInterceptDeploymentGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_intercept_deployment_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInterceptDeploymentGroupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_intercept_deployment_group( + self, + request: Optional[ + Union[intercept.GetInterceptDeploymentGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptDeploymentGroup: + r"""Gets a specific deployment group. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intercept_deployment_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetInterceptDeploymentGroupRequest, dict]]): + The request object. Request message for + GetInterceptDeploymentGroup. + name (:class:`str`): + Required. The name of the deployment group to retrieve. + Format: + projects/{project}/locations/{location}/interceptDeploymentGroups/{intercept_deployment_group} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup: + A deployment group aggregates many + zonal intercept backends (deployments) + into a single global intercept service. + Consumers can connect this service using + an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.GetInterceptDeploymentGroupRequest): + request = intercept.GetInterceptDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_intercept_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_intercept_deployment_group( + self, + request: Optional[ + Union[intercept.CreateInterceptDeploymentGroupRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + intercept_deployment_group: Optional[intercept.InterceptDeploymentGroup] = None, + intercept_deployment_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a deployment group in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_deployment_group = network_security_v1alpha1.InterceptDeploymentGroup() + intercept_deployment_group.network = "network_value" + + request = network_security_v1alpha1.CreateInterceptDeploymentGroupRequest( + parent="parent_value", + intercept_deployment_group_id="intercept_deployment_group_id_value", + intercept_deployment_group=intercept_deployment_group, + ) + + # Make the request + operation = client.create_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateInterceptDeploymentGroupRequest, dict]]): + The request object. Request message for + CreateInterceptDeploymentGroup. + parent (:class:`str`): + Required. The parent resource where + this deployment group will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_deployment_group (:class:`google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup`): + Required. The deployment group to + create. + + This corresponds to the ``intercept_deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_deployment_group_id (:class:`str`): + Required. The ID to use for the new + deployment group, which will become the + final component of the deployment + group's resource name. + + This corresponds to the ``intercept_deployment_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup` A deployment group aggregates many zonal intercept backends (deployments) + into a single global intercept service. Consumers can + connect this service using an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
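+        # (Editorial note, illustrative only): per the docstrings above, the new
+        # resource name is composed from `parent` plus the caller-chosen ID,
+        # e.g. a parent of "projects/my-project/locations/global" with an ID of
+        # "my-dg" (both placeholder values) would yield
+        # "projects/my-project/locations/global/interceptDeploymentGroups/my-dg".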
+ flattened_params = [ + parent, + intercept_deployment_group, + intercept_deployment_group_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.CreateInterceptDeploymentGroupRequest): + request = intercept.CreateInterceptDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if intercept_deployment_group is not None: + request.intercept_deployment_group = intercept_deployment_group + if intercept_deployment_group_id is not None: + request.intercept_deployment_group_id = intercept_deployment_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_intercept_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intercept.InterceptDeploymentGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_intercept_deployment_group( + self, + request: Optional[ + Union[intercept.UpdateInterceptDeploymentGroupRequest, dict] + ] = None, + *, + intercept_deployment_group: Optional[intercept.InterceptDeploymentGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a deployment group. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_deployment_group = network_security_v1alpha1.InterceptDeploymentGroup() + intercept_deployment_group.network = "network_value" + + request = network_security_v1alpha1.UpdateInterceptDeploymentGroupRequest( + intercept_deployment_group=intercept_deployment_group, + ) + + # Make the request + operation = client.update_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateInterceptDeploymentGroupRequest, dict]]): + The request object. Request message for + UpdateInterceptDeploymentGroup. + intercept_deployment_group (:class:`google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup`): + Required. The deployment group to + update. + + This corresponds to the ``intercept_deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to update. Fields are + specified relative to the deployment group (e.g. + ``description``; *not* + ``intercept_deployment_group.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup` A deployment group aggregates many zonal intercept backends (deployments) + into a single global intercept service. Consumers can + connect this service using an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [intercept_deployment_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
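+        # (Editorial note, hedged): because `request` is typed as
+        # Union[..., dict], a plain mapping is also accepted and handed to the
+        # proto-plus request constructor below; a sketch with placeholder values
+        # might look like
+        # {"intercept_deployment_group": intercept_deployment_group,
+        #  "update_mask": {"paths": ["description"]}}.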
+ if not isinstance(request, intercept.UpdateInterceptDeploymentGroupRequest): + request = intercept.UpdateInterceptDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if intercept_deployment_group is not None: + request.intercept_deployment_group = intercept_deployment_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_intercept_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "intercept_deployment_group.name", + request.intercept_deployment_group.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intercept.InterceptDeploymentGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_intercept_deployment_group( + self, + request: Optional[ + Union[intercept.DeleteInterceptDeploymentGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a deployment group. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteInterceptDeploymentGroupRequest, dict]]): + The request object. Request message for + DeleteInterceptDeploymentGroup. + name (:class:`str`): + Required. The deployment group to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.DeleteInterceptDeploymentGroupRequest): + request = intercept.DeleteInterceptDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_intercept_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_intercept_deployments( + self, + request: Optional[ + Union[intercept.ListInterceptDeploymentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInterceptDeploymentsAsyncPager: + r"""Lists deployments in a given project and location. + See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_intercept_deployments(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_deployments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsRequest, dict]]): + The request object. Request message for + ListInterceptDeployments. + parent (:class:`str`): + Required. The parent, which owns this collection of + deployments. Example: + ``projects/123456789/locations/us-central1-a``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptDeploymentsAsyncPager: + Response message for + ListInterceptDeployments. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.ListInterceptDeploymentsRequest): + request = intercept.ListInterceptDeploymentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_intercept_deployments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
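+        # Usage sketch (illustrative; values are placeholders): the pager can be
+        # consumed with `async for`, fetching additional pages lazily, e.g.
+        #
+        #     pager = await client.list_intercept_deployments(parent=parent)
+        #     async for deployment in pager:
+        #         print(deployment.name)
+        #
+        # or page-by-page via `async for page in pager.pages`.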
+ response = pagers.ListInterceptDeploymentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_intercept_deployment( + self, + request: Optional[Union[intercept.GetInterceptDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptDeployment: + r"""Gets a specific deployment. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intercept_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetInterceptDeploymentRequest, dict]]): + The request object. Request message for + GetInterceptDeployment. + name (:class:`str`): + Required. The name of the deployment to retrieve. + Format: + projects/{project}/locations/{location}/interceptDeployments/{intercept_deployment} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.InterceptDeployment: + A deployment represents a zonal + intercept backend ready to accept + GENEVE-encapsulated traffic, e.g. a + zonal instance group fronted by an + internal passthrough load balancer. + Deployments are always part of a global + deployment group which represents a + global intercept service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
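+        # Usage sketch (illustrative; the name below is a placeholder): a plain dict
+        # is also accepted as the request; proto-plus coerces it field-by-field, e.g.
+        # `{"name": "projects/p/locations/l/interceptDeployments/d"}`.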
+ if not isinstance(request, intercept.GetInterceptDeploymentRequest): + request = intercept.GetInterceptDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_intercept_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_intercept_deployment( + self, + request: Optional[ + Union[intercept.CreateInterceptDeploymentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + intercept_deployment: Optional[intercept.InterceptDeployment] = None, + intercept_deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a deployment in a given project and location. + See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_deployment = network_security_v1alpha1.InterceptDeployment() + intercept_deployment.forwarding_rule = "forwarding_rule_value" + intercept_deployment.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.CreateInterceptDeploymentRequest( + parent="parent_value", + intercept_deployment_id="intercept_deployment_id_value", + intercept_deployment=intercept_deployment, + ) + + # Make the request + operation = client.create_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateInterceptDeploymentRequest, dict]]): + The request object. Request message for + CreateInterceptDeployment. + parent (:class:`str`): + Required. The parent resource where + this deployment will be created. Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_deployment (:class:`google.cloud.network_security_v1alpha1.types.InterceptDeployment`): + Required. The deployment to create. 
+ This corresponds to the ``intercept_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_deployment_id (:class:`str`): + Required. The ID to use for the new + deployment, which will become the final + component of the deployment's resource + name. + + This corresponds to the ``intercept_deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptDeployment` A deployment represents a zonal intercept backend ready to accept + GENEVE-encapsulated traffic, e.g. a zonal instance + group fronted by an internal passthrough load + balancer. Deployments are always part of a global + deployment group which represents a global intercept + service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, intercept_deployment, intercept_deployment_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.CreateInterceptDeploymentRequest): + request = intercept.CreateInterceptDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if intercept_deployment is not None: + request.intercept_deployment = intercept_deployment + if intercept_deployment_id is not None: + request.intercept_deployment_id = intercept_deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_intercept_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intercept.InterceptDeployment, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
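+        # Note (illustrative): the AsyncOperation configured above resolves to an
+        # InterceptDeployment, with common.OperationMetadata as its LRO metadata type.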
+ return response + + async def update_intercept_deployment( + self, + request: Optional[ + Union[intercept.UpdateInterceptDeploymentRequest, dict] + ] = None, + *, + intercept_deployment: Optional[intercept.InterceptDeployment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a deployment. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_deployment = network_security_v1alpha1.InterceptDeployment() + intercept_deployment.forwarding_rule = "forwarding_rule_value" + intercept_deployment.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.UpdateInterceptDeploymentRequest( + intercept_deployment=intercept_deployment, + ) + + # Make the request + operation = client.update_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateInterceptDeploymentRequest, dict]]): + The request object. Request message for + UpdateInterceptDeployment. + intercept_deployment (:class:`google.cloud.network_security_v1alpha1.types.InterceptDeployment`): + Required. The deployment to update. + This corresponds to the ``intercept_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to update. Fields are + specified relative to the deployment (e.g. + ``description``; *not* + ``intercept_deployment.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptDeployment` A deployment represents a zonal intercept backend ready to accept + GENEVE-encapsulated traffic, e.g. a zonal instance + group fronted by an internal passthrough load + balancer. 
Deployments are always part of a global + deployment group which represents a global intercept + service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [intercept_deployment, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.UpdateInterceptDeploymentRequest): + request = intercept.UpdateInterceptDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if intercept_deployment is not None: + request.intercept_deployment = intercept_deployment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_intercept_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("intercept_deployment.name", request.intercept_deployment.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intercept.InterceptDeployment, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_intercept_deployment( + self, + request: Optional[ + Union[intercept.DeleteInterceptDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a deployment. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteInterceptDeploymentRequest, dict]]): + The request object. Request message for + DeleteInterceptDeployment. + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.DeleteInterceptDeploymentRequest): + request = intercept.DeleteInterceptDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_intercept_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
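+        # Note (illustrative): when `retry` and `timeout` are left as
+        # gapic_v1.method.DEFAULT, the defaults configured on the wrapped method
+        # in the transport's `_wrapped_methods` are applied.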
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
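+        # Usage sketch (illustrative; the name below is a placeholder): a dict such
+        # as {"name": "projects/my-project/locations/-"} is expanded into
+        # operations_pb2.ListOperationsRequest(name="projects/my-project/locations/-").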
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. 
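+        # Usage sketch (illustrative): TestIamPermissionsRequest carries the target
+        # `resource` name and the list of `permissions` to check; a dict such as
+        # {"resource": name, "permissions": permissions} is accepted here as well.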
+ + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "InterceptAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("InterceptAsyncClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/client.py new file mode 100644 index 000000000000..fb5b7d73184f --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/client.py @@ -0,0 +1,4324 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.intercept import pagers +from google.cloud.network_security_v1alpha1.types import common, intercept + +from .transports.base import DEFAULT_CLIENT_INFO, InterceptTransport +from .transports.grpc import InterceptGrpcTransport +from .transports.grpc_asyncio import InterceptGrpcAsyncIOTransport +from .transports.rest import InterceptRestTransport + + +class InterceptClientMeta(type): + """Metaclass for the Intercept client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[InterceptTransport]] + _transport_registry["grpc"] = InterceptGrpcTransport + _transport_registry["grpc_asyncio"] = InterceptGrpcAsyncIOTransport + _transport_registry["rest"] = InterceptRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[InterceptTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class InterceptClient(metaclass=InterceptClientMeta): + """Service for Third-Party Packet Intercept (TPPI). 
+    TPPI is the "in-band" flavor of the Network Security
+    Integrations product.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "networksecurity.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "networksecurity.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            InterceptClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            InterceptClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> InterceptTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            InterceptTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def forwarding_rule_path(
+        project: str,
+        forwarding_rule: str,
+    ) -> str:
+        """Returns a fully-qualified forwarding_rule string."""
+        return "projects/{project}/global/forwardingRules/{forwarding_rule}".format(
+            project=project,
+            forwarding_rule=forwarding_rule,
+        )
+
+    @staticmethod
+    def parse_forwarding_rule_path(path: str) -> Dict[str, str]:
+        """Parses a forwarding_rule path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/global/forwardingRules/(?P<forwarding_rule>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def intercept_deployment_path(
+        project: str,
+        location: str,
+        intercept_deployment: str,
+    ) -> str:
+        """Returns a fully-qualified intercept_deployment string."""
+        return "projects/{project}/locations/{location}/interceptDeployments/{intercept_deployment}".format(
+            project=project,
+            location=location,
+            intercept_deployment=intercept_deployment,
+        )
+
+    @staticmethod
+    def parse_intercept_deployment_path(path: str) -> Dict[str, str]:
+        """Parses an intercept_deployment path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/interceptDeployments/(?P<intercept_deployment>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def intercept_deployment_group_path(
+        project: str,
+        location: str,
+        intercept_deployment_group: str,
+    ) -> str:
+        """Returns a fully-qualified intercept_deployment_group string."""
+        return "projects/{project}/locations/{location}/interceptDeploymentGroups/{intercept_deployment_group}".format(
+            project=project,
+            location=location,
+            intercept_deployment_group=intercept_deployment_group,
+        )
+
+    @staticmethod
+    def parse_intercept_deployment_group_path(path: str) -> Dict[str, str]:
+        """Parses an intercept_deployment_group path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/interceptDeploymentGroups/(?P<intercept_deployment_group>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def intercept_endpoint_group_path(
+        project: str,
+        location: str,
+        intercept_endpoint_group: str,
+    ) -> str:
+        """Returns a fully-qualified intercept_endpoint_group string."""
+        return "projects/{project}/locations/{location}/interceptEndpointGroups/{intercept_endpoint_group}".format(
+            project=project,
+            location=location,
+            intercept_endpoint_group=intercept_endpoint_group,
+        )
+
+    @staticmethod
+    def parse_intercept_endpoint_group_path(path: str) -> Dict[str, str]:
+        """Parses an intercept_endpoint_group path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/interceptEndpointGroups/(?P<intercept_endpoint_group>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def intercept_endpoint_group_association_path(
+        project: str,
+        location: str,
+        intercept_endpoint_group_association: str,
+    ) -> str:
+        """Returns a fully-qualified intercept_endpoint_group_association string."""
+        return "projects/{project}/locations/{location}/interceptEndpointGroupAssociations/{intercept_endpoint_group_association}".format(
+            project=project,
+            location=location,
+            intercept_endpoint_group_association=intercept_endpoint_group_association,
+        )
+
+    @staticmethod
+    def parse_intercept_endpoint_group_association_path(path: str) -> Dict[str, str]:
+        """Parses an intercept_endpoint_group_association path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/interceptEndpointGroupAssociations/(?P<intercept_endpoint_group_association>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
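+    # Usage sketch (illustrative; values are placeholders): the *_path helpers build
+    # fully-qualified resource names and the parse_*_path helpers invert them,
+    # returning {} when the path does not match, e.g.
+    #
+    #     name = InterceptClient.intercept_deployment_path(
+    #         "my-project", "us-central1-a", "my-deployment"
+    #     )
+    #     # -> "projects/my-project/locations/us-central1-a/interceptDeployments/my-deployment"
+    #     InterceptClient.parse_intercept_deployment_path(name)
+    #     # -> {"project": "my-project", "location": "us-central1-a",
+    #     #     "intercept_deployment": "my-deployment"}
+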
+    @staticmethod
+    def network_path(
+        project: str,
+        network: str,
+    ) -> str:
+        """Returns a fully-qualified network string."""
+        return "projects/{project}/global/networks/{network}".format(
+            project=project,
+            network=network,
+        )
+
+    @staticmethod
+    def parse_network_path(path: str) -> Dict[str, str]:
+        """Parses a network path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+                GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        use_client_cert = os.getenv(
+            "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+        ).lower()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = InterceptClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = InterceptClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = InterceptClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = InterceptClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, InterceptTransport, Callable[..., InterceptTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the intercept client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,InterceptTransport,Callable[..., InterceptTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InterceptTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = InterceptClient._read_environment_variables() + self._client_cert_source = InterceptClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = InterceptClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, InterceptTransport) + if transport_provided: + # transport is a InterceptTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(InterceptTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or InterceptClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[InterceptTransport], Callable[..., InterceptTransport] + ] = ( + InterceptClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InterceptTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.InterceptClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "credentialsType": None, + }, + ) + + def list_intercept_endpoint_groups( + self, + request: Optional[ + Union[intercept.ListInterceptEndpointGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInterceptEndpointGroupsPager: + r"""Lists endpoint groups in a given project and + location. See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_intercept_endpoint_groups(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptEndpointGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_endpoint_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsRequest, dict]): + The request object. Request message for + ListInterceptEndpointGroups. + parent (str): + Required. The parent, which owns this collection of + endpoint groups. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptEndpointGroupsPager: + Response message for + ListInterceptEndpointGroups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.ListInterceptEndpointGroupsRequest): + request = intercept.ListInterceptEndpointGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_intercept_endpoint_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListInterceptEndpointGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_intercept_endpoint_group( + self, + request: Optional[ + Union[intercept.GetInterceptEndpointGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptEndpointGroup: + r"""Gets a specific endpoint group. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptEndpointGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_intercept_endpoint_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetInterceptEndpointGroupRequest, dict]): + The request object. Request message for + GetInterceptEndpointGroup. + name (str): + Required. The name of the endpoint group to retrieve. + Format: + projects/{project}/locations/{location}/interceptEndpointGroups/{intercept_endpoint_group} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup: + An endpoint group is a consumer + frontend for a deployment group + (backend). In order to configure + intercept for a network, consumers must + create: + + - An association between their network + and the endpoint group. + - A security profile that points to the + endpoint group. + - A firewall rule that references the + security profile (group). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, intercept.GetInterceptEndpointGroupRequest): + request = intercept.GetInterceptEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_intercept_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_intercept_endpoint_group( + self, + request: Optional[ + Union[intercept.CreateInterceptEndpointGroupRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + intercept_endpoint_group: Optional[intercept.InterceptEndpointGroup] = None, + intercept_endpoint_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates an endpoint group in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_endpoint_group = network_security_v1alpha1.InterceptEndpointGroup() + intercept_endpoint_group.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.CreateInterceptEndpointGroupRequest( + parent="parent_value", + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + intercept_endpoint_group=intercept_endpoint_group, + ) + + # Make the request + operation = client.create_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateInterceptEndpointGroupRequest, dict]): + The request object. Request message for + CreateInterceptEndpointGroup. + parent (str): + Required. The parent resource where + this endpoint group will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_endpoint_group (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup): + Required. The endpoint group to + create. + + This corresponds to the ``intercept_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ intercept_endpoint_group_id (str): + Required. The ID to use for the + endpoint group, which will become the + final component of the endpoint group's + resource name. + + This corresponds to the ``intercept_endpoint_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup` An endpoint group is a consumer frontend for a deployment group (backend). + In order to configure intercept for a network, + consumers must create: - An association between their + network and the endpoint group. - A security profile + that points to the endpoint group. - A firewall rule + that references the security profile (group). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + intercept_endpoint_group, + intercept_endpoint_group_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.CreateInterceptEndpointGroupRequest): + request = intercept.CreateInterceptEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if intercept_endpoint_group is not None: + request.intercept_endpoint_group = intercept_endpoint_group + if intercept_endpoint_group_id is not None: + request.intercept_endpoint_group_id = intercept_endpoint_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_intercept_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intercept.InterceptEndpointGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_intercept_endpoint_group( + self, + request: Optional[ + Union[intercept.UpdateInterceptEndpointGroupRequest, dict] + ] = None, + *, + intercept_endpoint_group: Optional[intercept.InterceptEndpointGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an endpoint group. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_endpoint_group = network_security_v1alpha1.InterceptEndpointGroup() + intercept_endpoint_group.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.UpdateInterceptEndpointGroupRequest( + intercept_endpoint_group=intercept_endpoint_group, + ) + + # Make the request + operation = client.update_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateInterceptEndpointGroupRequest, dict]): + The request object. Request message for + UpdateInterceptEndpointGroup. + intercept_endpoint_group (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup): + Required. The endpoint group to + update. + + This corresponds to the ``intercept_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are + specified relative to the endpoint group (e.g. + ``description``; *not* + ``intercept_endpoint_group.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup` An endpoint group is a consumer frontend for a deployment group (backend). + In order to configure intercept for a network, + consumers must create: - An association between their + network and the endpoint group. - A security profile + that points to the endpoint group. 
- A firewall rule + that references the security profile (group). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [intercept_endpoint_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.UpdateInterceptEndpointGroupRequest): + request = intercept.UpdateInterceptEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if intercept_endpoint_group is not None: + request.intercept_endpoint_group = intercept_endpoint_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_intercept_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "intercept_endpoint_group.name", + request.intercept_endpoint_group.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intercept.InterceptEndpointGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_intercept_endpoint_group( + self, + request: Optional[ + Union[intercept.DeleteInterceptEndpointGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes an endpoint group. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptEndpointGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteInterceptEndpointGroupRequest, dict]): + The request object. Request message for + DeleteInterceptEndpointGroup. + name (str): + Required. The endpoint group to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.DeleteInterceptEndpointGroupRequest): + request = intercept.DeleteInterceptEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_intercept_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_intercept_endpoint_group_associations( + self, + request: Optional[ + Union[intercept.ListInterceptEndpointGroupAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInterceptEndpointGroupAssociationsPager: + r"""Lists associations in a given project and location. + See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_intercept_endpoint_group_associations(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptEndpointGroupAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_endpoint_group_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsRequest, dict]): + The request object. Request message for + ListInterceptEndpointGroupAssociations. + parent (str): + Required. The parent, which owns this collection of + associations. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptEndpointGroupAssociationsPager: + Response message for + ListInterceptEndpointGroupAssociations. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, intercept.ListInterceptEndpointGroupAssociationsRequest + ): + request = intercept.ListInterceptEndpointGroupAssociationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_intercept_endpoint_group_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInterceptEndpointGroupAssociationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_intercept_endpoint_group_association( + self, + request: Optional[ + Union[intercept.GetInterceptEndpointGroupAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptEndpointGroupAssociation: + r"""Gets a specific association. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_intercept_endpoint_group_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetInterceptEndpointGroupAssociationRequest, dict]): + The request object. Request message for + GetInterceptEndpointGroupAssociation. + name (str): + Required. The name of the association to retrieve. + Format: + projects/{project}/locations/{location}/interceptEndpointGroupAssociations/{intercept_endpoint_group_association} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation: + An endpoint group association + represents a link between a network and + an endpoint group in the organization. + + Creating an association creates the + networking infrastructure linking the + network to the endpoint group, but does + not enable intercept by itself. To + enable intercept, the user must also + create a network firewall policy + containing intercept rules and associate + it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, intercept.GetInterceptEndpointGroupAssociationRequest + ): + request = intercept.GetInterceptEndpointGroupAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_intercept_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_intercept_endpoint_group_association( + self, + request: Optional[ + Union[intercept.CreateInterceptEndpointGroupAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + intercept_endpoint_group_association: Optional[ + intercept.InterceptEndpointGroupAssociation + ] = None, + intercept_endpoint_group_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates an association in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_endpoint_group_association = network_security_v1alpha1.InterceptEndpointGroupAssociation() + intercept_endpoint_group_association.intercept_endpoint_group = "intercept_endpoint_group_value" + intercept_endpoint_group_association.network = "network_value" + + request = network_security_v1alpha1.CreateInterceptEndpointGroupAssociationRequest( + parent="parent_value", + intercept_endpoint_group_association=intercept_endpoint_group_association, + ) + + # Make the request + operation = client.create_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateInterceptEndpointGroupAssociationRequest, dict]): + The request object. Request message for + CreateInterceptEndpointGroupAssociation. + parent (str): + Required. The parent resource where + this association will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_endpoint_group_association (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation): + Required. The association to create. + This corresponds to the ``intercept_endpoint_group_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_endpoint_group_association_id (str): + Optional. The ID to use for the new + association, which will become the final + component of the endpoint group's + resource name. If not provided, the + server will generate a unique ID. + + This corresponds to the ``intercept_endpoint_group_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation` An endpoint group association represents a link between a network and an + endpoint group in the organization. + + Creating an association creates the networking + infrastructure linking the network to the endpoint + group, but does not enable intercept by itself. To + enable intercept, the user must also create a network + firewall policy containing intercept rules and + associate it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [ + parent, + intercept_endpoint_group_association, + intercept_endpoint_group_association_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, intercept.CreateInterceptEndpointGroupAssociationRequest + ): + request = intercept.CreateInterceptEndpointGroupAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if intercept_endpoint_group_association is not None: + request.intercept_endpoint_group_association = ( + intercept_endpoint_group_association + ) + if intercept_endpoint_group_association_id is not None: + request.intercept_endpoint_group_association_id = ( + intercept_endpoint_group_association_id + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_intercept_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intercept.InterceptEndpointGroupAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_intercept_endpoint_group_association( + self, + request: Optional[ + Union[intercept.UpdateInterceptEndpointGroupAssociationRequest, dict] + ] = None, + *, + intercept_endpoint_group_association: Optional[ + intercept.InterceptEndpointGroupAssociation + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an association. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_endpoint_group_association = network_security_v1alpha1.InterceptEndpointGroupAssociation() + intercept_endpoint_group_association.intercept_endpoint_group = "intercept_endpoint_group_value" + intercept_endpoint_group_association.network = "network_value" + + request = network_security_v1alpha1.UpdateInterceptEndpointGroupAssociationRequest( + intercept_endpoint_group_association=intercept_endpoint_group_association, + ) + + # Make the request + operation = client.update_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateInterceptEndpointGroupAssociationRequest, dict]): + The request object. Request message for + UpdateInterceptEndpointGroupAssociation. + intercept_endpoint_group_association (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation): + Required. The association to update. + This corresponds to the ``intercept_endpoint_group_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are + specified relative to the association (e.g. + ``description``; *not* + ``intercept_endpoint_group_association.description``). + See https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation` An endpoint group association represents a link between a network and an + endpoint group in the organization. + + Creating an association creates the networking + infrastructure linking the network to the endpoint + group, but does not enable intercept by itself. To + enable intercept, the user must also create a network + firewall policy containing intercept rules and + associate it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [intercept_endpoint_group_association, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, intercept.UpdateInterceptEndpointGroupAssociationRequest + ): + request = intercept.UpdateInterceptEndpointGroupAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if intercept_endpoint_group_association is not None: + request.intercept_endpoint_group_association = ( + intercept_endpoint_group_association + ) + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_intercept_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "intercept_endpoint_group_association.name", + request.intercept_endpoint_group_association.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intercept.InterceptEndpointGroupAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_intercept_endpoint_group_association( + self, + request: Optional[ + Union[intercept.DeleteInterceptEndpointGroupAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes an association. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteInterceptEndpointGroupAssociationRequest, dict]): + The request object. 
Request message for + DeleteInterceptEndpointGroupAssociation. + name (str): + Required. The association to delete. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, intercept.DeleteInterceptEndpointGroupAssociationRequest + ): + request = intercept.DeleteInterceptEndpointGroupAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_intercept_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_intercept_deployment_groups( + self, + request: Optional[ + Union[intercept.ListInterceptDeploymentGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInterceptDeploymentGroupsPager: + r"""Lists deployment groups in a given project and + location. See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_intercept_deployment_groups(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_deployment_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsRequest, dict]): + The request object. Request message for + ListInterceptDeploymentGroups. + parent (str): + Required. The parent, which owns this collection of + deployment groups. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptDeploymentGroupsPager: + Response message for + ListInterceptDeploymentGroups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.ListInterceptDeploymentGroupsRequest): + request = intercept.ListInterceptDeploymentGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_intercept_deployment_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInterceptDeploymentGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_intercept_deployment_group( + self, + request: Optional[ + Union[intercept.GetInterceptDeploymentGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptDeploymentGroup: + r"""Gets a specific deployment group. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_intercept_deployment_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetInterceptDeploymentGroupRequest, dict]): + The request object. Request message for + GetInterceptDeploymentGroup. + name (str): + Required. The name of the deployment group to retrieve. + Format: + projects/{project}/locations/{location}/interceptDeploymentGroups/{intercept_deployment_group} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup: + A deployment group aggregates many + zonal intercept backends (deployments) + into a single global intercept service. + Consumers can connect this service using + an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.GetInterceptDeploymentGroupRequest): + request = intercept.GetInterceptDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_intercept_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_intercept_deployment_group( + self, + request: Optional[ + Union[intercept.CreateInterceptDeploymentGroupRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + intercept_deployment_group: Optional[intercept.InterceptDeploymentGroup] = None, + intercept_deployment_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a deployment group in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_deployment_group = network_security_v1alpha1.InterceptDeploymentGroup() + intercept_deployment_group.network = "network_value" + + request = network_security_v1alpha1.CreateInterceptDeploymentGroupRequest( + parent="parent_value", + intercept_deployment_group_id="intercept_deployment_group_id_value", + intercept_deployment_group=intercept_deployment_group, + ) + + # Make the request + operation = client.create_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateInterceptDeploymentGroupRequest, dict]): + The request object. Request message for + CreateInterceptDeploymentGroup. + parent (str): + Required. The parent resource where + this deployment group will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_deployment_group (google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup): + Required. The deployment group to + create. 
+ + This corresponds to the ``intercept_deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_deployment_group_id (str): + Required. The ID to use for the new + deployment group, which will become the + final component of the deployment + group's resource name. + + This corresponds to the ``intercept_deployment_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup` A deployment group aggregates many zonal intercept backends (deployments) + into a single global intercept service. Consumers can + connect this service using an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + intercept_deployment_group, + intercept_deployment_group_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.CreateInterceptDeploymentGroupRequest): + request = intercept.CreateInterceptDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if intercept_deployment_group is not None: + request.intercept_deployment_group = intercept_deployment_group + if intercept_deployment_group_id is not None: + request.intercept_deployment_group_id = intercept_deployment_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_intercept_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intercept.InterceptDeploymentGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_intercept_deployment_group( + self, + request: Optional[ + Union[intercept.UpdateInterceptDeploymentGroupRequest, dict] + ] = None, + *, + intercept_deployment_group: Optional[intercept.InterceptDeploymentGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a deployment group. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_deployment_group = network_security_v1alpha1.InterceptDeploymentGroup() + intercept_deployment_group.network = "network_value" + + request = network_security_v1alpha1.UpdateInterceptDeploymentGroupRequest( + intercept_deployment_group=intercept_deployment_group, + ) + + # Make the request + operation = client.update_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateInterceptDeploymentGroupRequest, dict]): + The request object. Request message for + UpdateInterceptDeploymentGroup. + intercept_deployment_group (google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup): + Required. The deployment group to + update. + + This corresponds to the ``intercept_deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are + specified relative to the deployment group (e.g. + ``description``; *not* + ``intercept_deployment_group.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup` A deployment group aggregates many zonal intercept backends (deployments) + into a single global intercept service. Consumers can + connect this service using an endpoint group. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [intercept_deployment_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.UpdateInterceptDeploymentGroupRequest): + request = intercept.UpdateInterceptDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if intercept_deployment_group is not None: + request.intercept_deployment_group = intercept_deployment_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_intercept_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "intercept_deployment_group.name", + request.intercept_deployment_group.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intercept.InterceptDeploymentGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_intercept_deployment_group( + self, + request: Optional[ + Union[intercept.DeleteInterceptDeploymentGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a deployment group. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteInterceptDeploymentGroupRequest, dict]): + The request object. Request message for + DeleteInterceptDeploymentGroup. 
+ name (str): + Required. The deployment group to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.DeleteInterceptDeploymentGroupRequest): + request = intercept.DeleteInterceptDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_intercept_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_intercept_deployments( + self, + request: Optional[ + Union[intercept.ListInterceptDeploymentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInterceptDeploymentsPager: + r"""Lists deployments in a given project and location. + See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_intercept_deployments(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_deployments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsRequest, dict]): + The request object. Request message for + ListInterceptDeployments. + parent (str): + Required. The parent, which owns this collection of + deployments. Example: + ``projects/123456789/locations/us-central1-a``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptDeploymentsPager: + Response message for + ListInterceptDeployments. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.ListInterceptDeploymentsRequest): + request = intercept.ListInterceptDeploymentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_intercept_deployments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListInterceptDeploymentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_intercept_deployment( + self, + request: Optional[Union[intercept.GetInterceptDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptDeployment: + r"""Gets a specific deployment. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_intercept_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetInterceptDeploymentRequest, dict]): + The request object. Request message for + GetInterceptDeployment. + name (str): + Required. The name of the deployment to retrieve. + Format: + projects/{project}/locations/{location}/interceptDeployments/{intercept_deployment} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.InterceptDeployment: + A deployment represents a zonal + intercept backend ready to accept + GENEVE-encapsulated traffic, e.g. a + zonal instance group fronted by an + internal passthrough load balancer. + Deployments are always part of a global + deployment group which represents a + global intercept service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, intercept.GetInterceptDeploymentRequest): + request = intercept.GetInterceptDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_intercept_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_intercept_deployment( + self, + request: Optional[ + Union[intercept.CreateInterceptDeploymentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + intercept_deployment: Optional[intercept.InterceptDeployment] = None, + intercept_deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a deployment in a given project and location. + See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_deployment = network_security_v1alpha1.InterceptDeployment() + intercept_deployment.forwarding_rule = "forwarding_rule_value" + intercept_deployment.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.CreateInterceptDeploymentRequest( + parent="parent_value", + intercept_deployment_id="intercept_deployment_id_value", + intercept_deployment=intercept_deployment, + ) + + # Make the request + operation = client.create_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateInterceptDeploymentRequest, dict]): + The request object. Request message for + CreateInterceptDeployment. + parent (str): + Required. The parent resource where + this deployment will be created. Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_deployment (google.cloud.network_security_v1alpha1.types.InterceptDeployment): + Required. The deployment to create. + This corresponds to the ``intercept_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intercept_deployment_id (str): + Required. 
The ID to use for the new + deployment, which will become the final + component of the deployment's resource + name. + + This corresponds to the ``intercept_deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptDeployment` A deployment represents a zonal intercept backend ready to accept + GENEVE-encapsulated traffic, e.g. a zonal instance + group fronted by an internal passthrough load + balancer. Deployments are always part of a global + deployment group which represents a global intercept + service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, intercept_deployment, intercept_deployment_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.CreateInterceptDeploymentRequest): + request = intercept.CreateInterceptDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if intercept_deployment is not None: + request.intercept_deployment = intercept_deployment + if intercept_deployment_id is not None: + request.intercept_deployment_id = intercept_deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_intercept_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intercept.InterceptDeployment, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_intercept_deployment( + self, + request: Optional[ + Union[intercept.UpdateInterceptDeploymentRequest, dict] + ] = None, + *, + intercept_deployment: Optional[intercept.InterceptDeployment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a deployment. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_deployment = network_security_v1alpha1.InterceptDeployment() + intercept_deployment.forwarding_rule = "forwarding_rule_value" + intercept_deployment.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.UpdateInterceptDeploymentRequest( + intercept_deployment=intercept_deployment, + ) + + # Make the request + operation = client.update_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateInterceptDeploymentRequest, dict]): + The request object. Request message for + UpdateInterceptDeployment. + intercept_deployment (google.cloud.network_security_v1alpha1.types.InterceptDeployment): + Required. The deployment to update. + This corresponds to the ``intercept_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are + specified relative to the deployment (e.g. + ``description``; *not* + ``intercept_deployment.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.InterceptDeployment` A deployment represents a zonal intercept backend ready to accept + GENEVE-encapsulated traffic, e.g. a zonal instance + group fronted by an internal passthrough load + balancer. Deployments are always part of a global + deployment group which represents a global intercept + service. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [intercept_deployment, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.UpdateInterceptDeploymentRequest): + request = intercept.UpdateInterceptDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if intercept_deployment is not None: + request.intercept_deployment = intercept_deployment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_intercept_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("intercept_deployment.name", request.intercept_deployment.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intercept.InterceptDeployment, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_intercept_deployment( + self, + request: Optional[ + Union[intercept.DeleteInterceptDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a deployment. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteInterceptDeploymentRequest, dict]): + The request object. Request message for + DeleteInterceptDeployment. + name (str): + Required. 
Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, intercept.DeleteInterceptDeploymentRequest): + request = intercept.DeleteInterceptDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_intercept_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "InterceptClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. 
+ + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. 
Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("InterceptClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/pagers.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/pagers.py new file mode 100644 index 000000000000..500056d81836 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/pagers.py @@ -0,0 +1,677 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.network_security_v1alpha1.types import intercept + + +class ListInterceptEndpointGroupsPager: + """A pager for iterating through ``list_intercept_endpoint_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``intercept_endpoint_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInterceptEndpointGroups`` requests and continue to iterate + through the ``intercept_endpoint_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., intercept.ListInterceptEndpointGroupsResponse], + request: intercept.ListInterceptEndpointGroupsRequest, + response: intercept.ListInterceptEndpointGroupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = intercept.ListInterceptEndpointGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[intercept.ListInterceptEndpointGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[intercept.InterceptEndpointGroup]: + for page in self.pages: + yield from page.intercept_endpoint_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInterceptEndpointGroupsAsyncPager: + """A pager for iterating through ``list_intercept_endpoint_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``intercept_endpoint_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInterceptEndpointGroups`` requests and continue to iterate + through the ``intercept_endpoint_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[intercept.ListInterceptEndpointGroupsResponse]], + request: intercept.ListInterceptEndpointGroupsRequest, + response: intercept.ListInterceptEndpointGroupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = intercept.ListInterceptEndpointGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[intercept.ListInterceptEndpointGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[intercept.InterceptEndpointGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.intercept_endpoint_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInterceptEndpointGroupAssociationsPager: + """A pager for iterating through ``list_intercept_endpoint_group_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``intercept_endpoint_group_associations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInterceptEndpointGroupAssociations`` requests and continue to iterate + through the ``intercept_endpoint_group_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., intercept.ListInterceptEndpointGroupAssociationsResponse], + request: intercept.ListInterceptEndpointGroupAssociationsRequest, + response: intercept.ListInterceptEndpointGroupAssociationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = intercept.ListInterceptEndpointGroupAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[intercept.ListInterceptEndpointGroupAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[intercept.InterceptEndpointGroupAssociation]: + for page in self.pages: + yield from page.intercept_endpoint_group_associations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInterceptEndpointGroupAssociationsAsyncPager: + """A pager for iterating through ``list_intercept_endpoint_group_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``intercept_endpoint_group_associations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInterceptEndpointGroupAssociations`` requests and continue to iterate + through the ``intercept_endpoint_group_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[intercept.ListInterceptEndpointGroupAssociationsResponse] + ], + request: intercept.ListInterceptEndpointGroupAssociationsRequest, + response: intercept.ListInterceptEndpointGroupAssociationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = intercept.ListInterceptEndpointGroupAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[intercept.ListInterceptEndpointGroupAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[intercept.InterceptEndpointGroupAssociation]: + async def async_generator(): + async for page in self.pages: + for response in page.intercept_endpoint_group_associations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInterceptDeploymentGroupsPager: + """A pager for iterating through ``list_intercept_deployment_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``intercept_deployment_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInterceptDeploymentGroups`` requests and continue to iterate + through the ``intercept_deployment_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., intercept.ListInterceptDeploymentGroupsResponse], + request: intercept.ListInterceptDeploymentGroupsRequest, + response: intercept.ListInterceptDeploymentGroupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = intercept.ListInterceptDeploymentGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[intercept.ListInterceptDeploymentGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[intercept.InterceptDeploymentGroup]: + for page in self.pages: + yield from page.intercept_deployment_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInterceptDeploymentGroupsAsyncPager: + """A pager for iterating through ``list_intercept_deployment_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``intercept_deployment_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInterceptDeploymentGroups`` requests and continue to iterate + through the ``intercept_deployment_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[intercept.ListInterceptDeploymentGroupsResponse] + ], + request: intercept.ListInterceptDeploymentGroupsRequest, + response: intercept.ListInterceptDeploymentGroupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = intercept.ListInterceptDeploymentGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[intercept.ListInterceptDeploymentGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[intercept.InterceptDeploymentGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.intercept_deployment_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInterceptDeploymentsPager: + """A pager for iterating through ``list_intercept_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``intercept_deployments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInterceptDeployments`` requests and continue to iterate + through the ``intercept_deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., intercept.ListInterceptDeploymentsResponse], + request: intercept.ListInterceptDeploymentsRequest, + response: intercept.ListInterceptDeploymentsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = intercept.ListInterceptDeploymentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[intercept.ListInterceptDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[intercept.InterceptDeployment]: + for page in self.pages: + yield from page.intercept_deployments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInterceptDeploymentsAsyncPager: + """A pager for iterating through ``list_intercept_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``intercept_deployments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInterceptDeployments`` requests and continue to iterate + through the ``intercept_deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[intercept.ListInterceptDeploymentsResponse]], + request: intercept.ListInterceptDeploymentsRequest, + response: intercept.ListInterceptDeploymentsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = intercept.ListInterceptDeploymentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[intercept.ListInterceptDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[intercept.InterceptDeployment]: + async def async_generator(): + async for page in self.pages: + for response in page.intercept_deployments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/README.rst b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/README.rst new file mode 100644 index 000000000000..809a44febeaf --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`InterceptTransport` is the ABC for all transports. +- public child `InterceptGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `InterceptGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseInterceptRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `InterceptRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/__init__.py new file mode 100644 index 000000000000..d482e9885b44 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InterceptTransport +from .grpc import InterceptGrpcTransport +from .grpc_asyncio import InterceptGrpcAsyncIOTransport +from .rest import InterceptRestInterceptor, InterceptRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[InterceptTransport]] +_transport_registry["grpc"] = InterceptGrpcTransport +_transport_registry["grpc_asyncio"] = InterceptGrpcAsyncIOTransport +_transport_registry["rest"] = InterceptRestTransport + +__all__ = ( + "InterceptTransport", + "InterceptGrpcTransport", + "InterceptGrpcAsyncIOTransport", + "InterceptRestTransport", + "InterceptRestInterceptor", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/base.py new file mode 100644 index 000000000000..9bac99fc06fd --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/base.py @@ -0,0 +1,591 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version +from google.cloud.network_security_v1alpha1.types import intercept + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class InterceptTransport(abc.ABC): + """Abstract transport class for Intercept.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "networksecurity.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_intercept_endpoint_groups: gapic_v1.method.wrap_method( + self.list_intercept_endpoint_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_intercept_endpoint_group: gapic_v1.method.wrap_method( + self.get_intercept_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.create_intercept_endpoint_group: gapic_v1.method.wrap_method( + self.create_intercept_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.update_intercept_endpoint_group: gapic_v1.method.wrap_method( + self.update_intercept_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_intercept_endpoint_group: gapic_v1.method.wrap_method( + self.delete_intercept_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.list_intercept_endpoint_group_associations: gapic_v1.method.wrap_method( + self.list_intercept_endpoint_group_associations, + default_timeout=None, + client_info=client_info, + ), + self.get_intercept_endpoint_group_association: gapic_v1.method.wrap_method( + self.get_intercept_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.create_intercept_endpoint_group_association: gapic_v1.method.wrap_method( + self.create_intercept_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.update_intercept_endpoint_group_association: gapic_v1.method.wrap_method( + self.update_intercept_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.delete_intercept_endpoint_group_association: gapic_v1.method.wrap_method( + self.delete_intercept_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.list_intercept_deployment_groups: gapic_v1.method.wrap_method( + self.list_intercept_deployment_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_intercept_deployment_group: gapic_v1.method.wrap_method( + self.get_intercept_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.create_intercept_deployment_group: gapic_v1.method.wrap_method( + self.create_intercept_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.update_intercept_deployment_group: gapic_v1.method.wrap_method( + self.update_intercept_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_intercept_deployment_group: gapic_v1.method.wrap_method( + self.delete_intercept_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.list_intercept_deployments: gapic_v1.method.wrap_method( + self.list_intercept_deployments, + default_timeout=None, + client_info=client_info, + ), + self.get_intercept_deployment: gapic_v1.method.wrap_method( + self.get_intercept_deployment, + default_timeout=None, + client_info=client_info, + ), + self.create_intercept_deployment: gapic_v1.method.wrap_method( + self.create_intercept_deployment, + default_timeout=None, + client_info=client_info, + ), + self.update_intercept_deployment: gapic_v1.method.wrap_method( + self.update_intercept_deployment, + default_timeout=None, + client_info=client_info, + ), + self.delete_intercept_deployment: gapic_v1.method.wrap_method( + self.delete_intercept_deployment, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: 
gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_intercept_endpoint_groups( + self, + ) -> Callable[ + [intercept.ListInterceptEndpointGroupsRequest], + Union[ + intercept.ListInterceptEndpointGroupsResponse, + Awaitable[intercept.ListInterceptEndpointGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.GetInterceptEndpointGroupRequest], + Union[ + intercept.InterceptEndpointGroup, + Awaitable[intercept.InterceptEndpointGroup], + ], + ]: + raise NotImplementedError() + + @property + def create_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.CreateInterceptEndpointGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.UpdateInterceptEndpointGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.DeleteInterceptEndpointGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_intercept_endpoint_group_associations( + self, + ) -> Callable[ + [intercept.ListInterceptEndpointGroupAssociationsRequest], + Union[ + intercept.ListInterceptEndpointGroupAssociationsResponse, + Awaitable[intercept.ListInterceptEndpointGroupAssociationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.GetInterceptEndpointGroupAssociationRequest], + Union[ + intercept.InterceptEndpointGroupAssociation, + Awaitable[intercept.InterceptEndpointGroupAssociation], + ], + ]: + raise NotImplementedError() + + @property + def create_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.CreateInterceptEndpointGroupAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise 
NotImplementedError() + + @property + def update_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.UpdateInterceptEndpointGroupAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.DeleteInterceptEndpointGroupAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_intercept_deployment_groups( + self, + ) -> Callable[ + [intercept.ListInterceptDeploymentGroupsRequest], + Union[ + intercept.ListInterceptDeploymentGroupsResponse, + Awaitable[intercept.ListInterceptDeploymentGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.GetInterceptDeploymentGroupRequest], + Union[ + intercept.InterceptDeploymentGroup, + Awaitable[intercept.InterceptDeploymentGroup], + ], + ]: + raise NotImplementedError() + + @property + def create_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.CreateInterceptDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.UpdateInterceptDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.DeleteInterceptDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_intercept_deployments( + self, + ) -> Callable[ + [intercept.ListInterceptDeploymentsRequest], + Union[ + intercept.ListInterceptDeploymentsResponse, + Awaitable[intercept.ListInterceptDeploymentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_intercept_deployment( + self, + ) -> Callable[ + [intercept.GetInterceptDeploymentRequest], + Union[intercept.InterceptDeployment, Awaitable[intercept.InterceptDeployment]], + ]: + raise NotImplementedError() + + @property + def create_intercept_deployment( + self, + ) -> Callable[ + [intercept.CreateInterceptDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_intercept_deployment( + self, + ) -> Callable[ + [intercept.UpdateInterceptDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_intercept_deployment( + self, + ) -> Callable[ + [intercept.DeleteInterceptDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> 
Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("InterceptTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/grpc.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/grpc.py new file mode 100644 index 000000000000..3245bd4b5a21 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/grpc.py @@ -0,0 +1,1180 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
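For orientation, the abstract callables declared above on InterceptTransport are normally exercised through a concrete transport such as the gRPC one defined in this new module. A minimal, illustrative sketch follows, assuming the generated package is importable as google.cloud.network_security_v1alpha1 (paths taken from this patch); the project and location in the request are hypothetical.

# Illustrative sketch: each RPC on the abstract InterceptTransport is a
# property returning a callable, resolved here to a lazily created gRPC stub.
from google.cloud.network_security_v1alpha1.services.intercept.transports.grpc import (
    InterceptGrpcTransport,
)
from google.cloud.network_security_v1alpha1.types import intercept

# With no arguments the transport resolves Application Default Credentials
# and opens a channel to networksecurity.googleapis.com.
transport = InterceptGrpcTransport()

request = intercept.ListInterceptEndpointGroupsRequest(
    parent="projects/my-project/locations/us-central1",  # hypothetical parent
)
response = transport.list_intercept_endpoint_groups(request)
print(response)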
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import intercept + +from .base import DEFAULT_CLIENT_INFO, InterceptTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class InterceptGrpcTransport(InterceptTransport): + """gRPC backend transport for Intercept. + + Service for Third-Party Packet Intercept (TPPI). + TPPI is the "in-band" flavor of the Network Security + Integrations product. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_intercept_endpoint_groups( + self, + ) -> Callable[ + [intercept.ListInterceptEndpointGroupsRequest], + intercept.ListInterceptEndpointGroupsResponse, + ]: + r"""Return a callable for the list intercept endpoint groups method over gRPC. + + Lists endpoint groups in a given project and + location. See https://google.aip.dev/132. + + Returns: + Callable[[~.ListInterceptEndpointGroupsRequest], + ~.ListInterceptEndpointGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_intercept_endpoint_groups" not in self._stubs: + self._stubs[ + "list_intercept_endpoint_groups" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/ListInterceptEndpointGroups", + request_serializer=intercept.ListInterceptEndpointGroupsRequest.serialize, + response_deserializer=intercept.ListInterceptEndpointGroupsResponse.deserialize, + ) + return self._stubs["list_intercept_endpoint_groups"] + + @property + def get_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.GetInterceptEndpointGroupRequest], intercept.InterceptEndpointGroup + ]: + r"""Return a callable for the get intercept endpoint group method over gRPC. + + Gets a specific endpoint group. + See https://google.aip.dev/131. 
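Because the create, update, and delete callables on this transport return raw google.longrunning operations rather than wrapped futures, the cached operations_client above is what the client layer uses to poll them. A rough, illustrative sketch; the resource name, collection id, and polling loop are assumptions, not part of the patch.

import time

from google.cloud.network_security_v1alpha1.services.intercept.transports.grpc import (
    InterceptGrpcTransport,
)
from google.cloud.network_security_v1alpha1.types import intercept

transport = InterceptGrpcTransport()

# Mutating RPCs return operations_pb2.Operation; poll it with the transport's
# cached OperationsClient until the server marks it done.
operation = transport.delete_intercept_endpoint_group(
    intercept.DeleteInterceptEndpointGroupRequest(
        # Hypothetical resource name; the collection id is an assumption.
        name="projects/my-project/locations/us-central1/interceptEndpointGroups/my-group",
    )
)
while not operation.done:
    time.sleep(5)
    operation = transport.operations_client.get_operation(operation.name)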
+ + Returns: + Callable[[~.GetInterceptEndpointGroupRequest], + ~.InterceptEndpointGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_intercept_endpoint_group" not in self._stubs: + self._stubs[ + "get_intercept_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/GetInterceptEndpointGroup", + request_serializer=intercept.GetInterceptEndpointGroupRequest.serialize, + response_deserializer=intercept.InterceptEndpointGroup.deserialize, + ) + return self._stubs["get_intercept_endpoint_group"] + + @property + def create_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.CreateInterceptEndpointGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create intercept endpoint + group method over gRPC. + + Creates an endpoint group in a given project and + location. See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateInterceptEndpointGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_intercept_endpoint_group" not in self._stubs: + self._stubs[ + "create_intercept_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/CreateInterceptEndpointGroup", + request_serializer=intercept.CreateInterceptEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_intercept_endpoint_group"] + + @property + def update_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.UpdateInterceptEndpointGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update intercept endpoint + group method over gRPC. + + Updates an endpoint group. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateInterceptEndpointGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_intercept_endpoint_group" not in self._stubs: + self._stubs[ + "update_intercept_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/UpdateInterceptEndpointGroup", + request_serializer=intercept.UpdateInterceptEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_intercept_endpoint_group"] + + @property + def delete_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.DeleteInterceptEndpointGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete intercept endpoint + group method over gRPC. + + Deletes an endpoint group. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteInterceptEndpointGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_intercept_endpoint_group" not in self._stubs: + self._stubs[ + "delete_intercept_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/DeleteInterceptEndpointGroup", + request_serializer=intercept.DeleteInterceptEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_intercept_endpoint_group"] + + @property + def list_intercept_endpoint_group_associations( + self, + ) -> Callable[ + [intercept.ListInterceptEndpointGroupAssociationsRequest], + intercept.ListInterceptEndpointGroupAssociationsResponse, + ]: + r"""Return a callable for the list intercept endpoint group + associations method over gRPC. + + Lists associations in a given project and location. + See https://google.aip.dev/132. + + Returns: + Callable[[~.ListInterceptEndpointGroupAssociationsRequest], + ~.ListInterceptEndpointGroupAssociationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_intercept_endpoint_group_associations" not in self._stubs: + self._stubs[ + "list_intercept_endpoint_group_associations" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/ListInterceptEndpointGroupAssociations", + request_serializer=intercept.ListInterceptEndpointGroupAssociationsRequest.serialize, + response_deserializer=intercept.ListInterceptEndpointGroupAssociationsResponse.deserialize, + ) + return self._stubs["list_intercept_endpoint_group_associations"] + + @property + def get_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.GetInterceptEndpointGroupAssociationRequest], + intercept.InterceptEndpointGroupAssociation, + ]: + r"""Return a callable for the get intercept endpoint group + association method over gRPC. + + Gets a specific association. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetInterceptEndpointGroupAssociationRequest], + ~.InterceptEndpointGroupAssociation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_intercept_endpoint_group_association" not in self._stubs: + self._stubs[ + "get_intercept_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/GetInterceptEndpointGroupAssociation", + request_serializer=intercept.GetInterceptEndpointGroupAssociationRequest.serialize, + response_deserializer=intercept.InterceptEndpointGroupAssociation.deserialize, + ) + return self._stubs["get_intercept_endpoint_group_association"] + + @property + def create_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.CreateInterceptEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create intercept endpoint + group association method over gRPC. + + Creates an association in a given project and + location. 
See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateInterceptEndpointGroupAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_intercept_endpoint_group_association" not in self._stubs: + self._stubs[ + "create_intercept_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/CreateInterceptEndpointGroupAssociation", + request_serializer=intercept.CreateInterceptEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_intercept_endpoint_group_association"] + + @property + def update_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.UpdateInterceptEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update intercept endpoint + group association method over gRPC. + + Updates an association. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateInterceptEndpointGroupAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_intercept_endpoint_group_association" not in self._stubs: + self._stubs[ + "update_intercept_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/UpdateInterceptEndpointGroupAssociation", + request_serializer=intercept.UpdateInterceptEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_intercept_endpoint_group_association"] + + @property + def delete_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.DeleteInterceptEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete intercept endpoint + group association method over gRPC. + + Deletes an association. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteInterceptEndpointGroupAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_intercept_endpoint_group_association" not in self._stubs: + self._stubs[ + "delete_intercept_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/DeleteInterceptEndpointGroupAssociation", + request_serializer=intercept.DeleteInterceptEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_intercept_endpoint_group_association"] + + @property + def list_intercept_deployment_groups( + self, + ) -> Callable[ + [intercept.ListInterceptDeploymentGroupsRequest], + intercept.ListInterceptDeploymentGroupsResponse, + ]: + r"""Return a callable for the list intercept deployment + groups method over gRPC. + + Lists deployment groups in a given project and + location. See https://google.aip.dev/132. + + Returns: + Callable[[~.ListInterceptDeploymentGroupsRequest], + ~.ListInterceptDeploymentGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_intercept_deployment_groups" not in self._stubs: + self._stubs[ + "list_intercept_deployment_groups" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/ListInterceptDeploymentGroups", + request_serializer=intercept.ListInterceptDeploymentGroupsRequest.serialize, + response_deserializer=intercept.ListInterceptDeploymentGroupsResponse.deserialize, + ) + return self._stubs["list_intercept_deployment_groups"] + + @property + def get_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.GetInterceptDeploymentGroupRequest], + intercept.InterceptDeploymentGroup, + ]: + r"""Return a callable for the get intercept deployment group method over gRPC. + + Gets a specific deployment group. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetInterceptDeploymentGroupRequest], + ~.InterceptDeploymentGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_intercept_deployment_group" not in self._stubs: + self._stubs[ + "get_intercept_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/GetInterceptDeploymentGroup", + request_serializer=intercept.GetInterceptDeploymentGroupRequest.serialize, + response_deserializer=intercept.InterceptDeploymentGroup.deserialize, + ) + return self._stubs["get_intercept_deployment_group"] + + @property + def create_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.CreateInterceptDeploymentGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create intercept deployment + group method over gRPC. + + Creates a deployment group in a given project and + location. See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateInterceptDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_intercept_deployment_group" not in self._stubs: + self._stubs[ + "create_intercept_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/CreateInterceptDeploymentGroup", + request_serializer=intercept.CreateInterceptDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_intercept_deployment_group"] + + @property + def update_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.UpdateInterceptDeploymentGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update intercept deployment + group method over gRPC. + + Updates a deployment group. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateInterceptDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_intercept_deployment_group" not in self._stubs: + self._stubs[ + "update_intercept_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/UpdateInterceptDeploymentGroup", + request_serializer=intercept.UpdateInterceptDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_intercept_deployment_group"] + + @property + def delete_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.DeleteInterceptDeploymentGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete intercept deployment + group method over gRPC. + + Deletes a deployment group. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteInterceptDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_intercept_deployment_group" not in self._stubs: + self._stubs[ + "delete_intercept_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/DeleteInterceptDeploymentGroup", + request_serializer=intercept.DeleteInterceptDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_intercept_deployment_group"] + + @property + def list_intercept_deployments( + self, + ) -> Callable[ + [intercept.ListInterceptDeploymentsRequest], + intercept.ListInterceptDeploymentsResponse, + ]: + r"""Return a callable for the list intercept deployments method over gRPC. + + Lists deployments in a given project and location. + See https://google.aip.dev/132. + + Returns: + Callable[[~.ListInterceptDeploymentsRequest], + ~.ListInterceptDeploymentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_intercept_deployments" not in self._stubs: + self._stubs[ + "list_intercept_deployments" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/ListInterceptDeployments", + request_serializer=intercept.ListInterceptDeploymentsRequest.serialize, + response_deserializer=intercept.ListInterceptDeploymentsResponse.deserialize, + ) + return self._stubs["list_intercept_deployments"] + + @property + def get_intercept_deployment( + self, + ) -> Callable[ + [intercept.GetInterceptDeploymentRequest], intercept.InterceptDeployment + ]: + r"""Return a callable for the get intercept deployment method over gRPC. + + Gets a specific deployment. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetInterceptDeploymentRequest], + ~.InterceptDeployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_intercept_deployment" not in self._stubs: + self._stubs["get_intercept_deployment"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/GetInterceptDeployment", + request_serializer=intercept.GetInterceptDeploymentRequest.serialize, + response_deserializer=intercept.InterceptDeployment.deserialize, + ) + return self._stubs["get_intercept_deployment"] + + @property + def create_intercept_deployment( + self, + ) -> Callable[ + [intercept.CreateInterceptDeploymentRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create intercept deployment method over gRPC. + + Creates a deployment in a given project and location. + See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateInterceptDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_intercept_deployment" not in self._stubs: + self._stubs[ + "create_intercept_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/CreateInterceptDeployment", + request_serializer=intercept.CreateInterceptDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_intercept_deployment"] + + @property + def update_intercept_deployment( + self, + ) -> Callable[ + [intercept.UpdateInterceptDeploymentRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update intercept deployment method over gRPC. + + Updates a deployment. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateInterceptDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_intercept_deployment" not in self._stubs: + self._stubs[ + "update_intercept_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/UpdateInterceptDeployment", + request_serializer=intercept.UpdateInterceptDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_intercept_deployment"] + + @property + def delete_intercept_deployment( + self, + ) -> Callable[ + [intercept.DeleteInterceptDeploymentRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete intercept deployment method over gRPC. + + Deletes a deployment. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteInterceptDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_intercept_deployment" not in self._stubs: + self._stubs[ + "delete_intercept_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/DeleteInterceptDeployment", + request_serializer=intercept.DeleteInterceptDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_intercept_deployment"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("InterceptGrpcTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/grpc_asyncio.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d8345c26ea9f --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/grpc_asyncio.py @@ -0,0 +1,1354 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import intercept + +from .base import DEFAULT_CLIENT_INFO, InterceptTransport +from .grpc import InterceptGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": str(client_call_details.method), + "response": 
grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class InterceptGrpcAsyncIOTransport(InterceptTransport): + """gRPC AsyncIO backend transport for Intercept. + + Service for Third-Party Packet Intercept (TPPI). + TPPI is the "in-band" flavor of the Network Security + Integrations product. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. 
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
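+                # Hedged configuration sketch (``my_cert_source`` and the PEM
+                # byte strings are placeholders, not part of this module): new
+                # code should prefer ``client_cert_source_for_mtls`` over this
+                # deprecated ``api_mtls_endpoint`` path, for example:
+                #
+                #     def my_cert_source() -> Tuple[bytes, bytes]:
+                #         return (cert_pem_bytes, key_pem_bytes)
+                #
+                #     transport = InterceptGrpcAsyncIOTransport(
+                #         client_cert_source_for_mtls=my_cert_source,
+                #     )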
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_intercept_endpoint_groups( + self, + ) -> Callable[ + [intercept.ListInterceptEndpointGroupsRequest], + Awaitable[intercept.ListInterceptEndpointGroupsResponse], + ]: + r"""Return a callable for the list intercept endpoint groups method over gRPC. + + Lists endpoint groups in a given project and + location. See https://google.aip.dev/132. + + Returns: + Callable[[~.ListInterceptEndpointGroupsRequest], + Awaitable[~.ListInterceptEndpointGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
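+        # Hedged usage sketch (the resource name below is a placeholder, not
+        # part of this module): the returned callable takes the request
+        # message directly and can be awaited, e.g.
+        #
+        #     request = intercept.ListInterceptEndpointGroupsRequest(
+        #         parent="projects/my-project/locations/us-central1",
+        #     )
+        #     response = await transport.list_intercept_endpoint_groups(request)
+        #
+        # Most callers reach this RPC through the generated Intercept client
+        # rather than the transport stub.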
+ if "list_intercept_endpoint_groups" not in self._stubs: + self._stubs[ + "list_intercept_endpoint_groups" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/ListInterceptEndpointGroups", + request_serializer=intercept.ListInterceptEndpointGroupsRequest.serialize, + response_deserializer=intercept.ListInterceptEndpointGroupsResponse.deserialize, + ) + return self._stubs["list_intercept_endpoint_groups"] + + @property + def get_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.GetInterceptEndpointGroupRequest], + Awaitable[intercept.InterceptEndpointGroup], + ]: + r"""Return a callable for the get intercept endpoint group method over gRPC. + + Gets a specific endpoint group. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetInterceptEndpointGroupRequest], + Awaitable[~.InterceptEndpointGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_intercept_endpoint_group" not in self._stubs: + self._stubs[ + "get_intercept_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/GetInterceptEndpointGroup", + request_serializer=intercept.GetInterceptEndpointGroupRequest.serialize, + response_deserializer=intercept.InterceptEndpointGroup.deserialize, + ) + return self._stubs["get_intercept_endpoint_group"] + + @property + def create_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.CreateInterceptEndpointGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create intercept endpoint + group method over gRPC. + + Creates an endpoint group in a given project and + location. See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateInterceptEndpointGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_intercept_endpoint_group" not in self._stubs: + self._stubs[ + "create_intercept_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/CreateInterceptEndpointGroup", + request_serializer=intercept.CreateInterceptEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_intercept_endpoint_group"] + + @property + def update_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.UpdateInterceptEndpointGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update intercept endpoint + group method over gRPC. + + Updates an endpoint group. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateInterceptEndpointGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
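+        # Hedged sketch (request construction elided): callables that resolve
+        # to a raw ``operations_pb2.Operation``, such as this one, can be
+        # polled through the ``operations_client`` property, e.g.
+        #
+        #     operation = await transport.update_intercept_endpoint_group(request)
+        #     latest = await transport.operations_client.get_operation(
+        #         name=operation.name
+        #     )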
+ if "update_intercept_endpoint_group" not in self._stubs: + self._stubs[ + "update_intercept_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/UpdateInterceptEndpointGroup", + request_serializer=intercept.UpdateInterceptEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_intercept_endpoint_group"] + + @property + def delete_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.DeleteInterceptEndpointGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete intercept endpoint + group method over gRPC. + + Deletes an endpoint group. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteInterceptEndpointGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_intercept_endpoint_group" not in self._stubs: + self._stubs[ + "delete_intercept_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/DeleteInterceptEndpointGroup", + request_serializer=intercept.DeleteInterceptEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_intercept_endpoint_group"] + + @property + def list_intercept_endpoint_group_associations( + self, + ) -> Callable[ + [intercept.ListInterceptEndpointGroupAssociationsRequest], + Awaitable[intercept.ListInterceptEndpointGroupAssociationsResponse], + ]: + r"""Return a callable for the list intercept endpoint group + associations method over gRPC. + + Lists associations in a given project and location. + See https://google.aip.dev/132. + + Returns: + Callable[[~.ListInterceptEndpointGroupAssociationsRequest], + Awaitable[~.ListInterceptEndpointGroupAssociationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_intercept_endpoint_group_associations" not in self._stubs: + self._stubs[ + "list_intercept_endpoint_group_associations" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/ListInterceptEndpointGroupAssociations", + request_serializer=intercept.ListInterceptEndpointGroupAssociationsRequest.serialize, + response_deserializer=intercept.ListInterceptEndpointGroupAssociationsResponse.deserialize, + ) + return self._stubs["list_intercept_endpoint_group_associations"] + + @property + def get_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.GetInterceptEndpointGroupAssociationRequest], + Awaitable[intercept.InterceptEndpointGroupAssociation], + ]: + r"""Return a callable for the get intercept endpoint group + association method over gRPC. + + Gets a specific association. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetInterceptEndpointGroupAssociationRequest], + Awaitable[~.InterceptEndpointGroupAssociation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_intercept_endpoint_group_association" not in self._stubs: + self._stubs[ + "get_intercept_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/GetInterceptEndpointGroupAssociation", + request_serializer=intercept.GetInterceptEndpointGroupAssociationRequest.serialize, + response_deserializer=intercept.InterceptEndpointGroupAssociation.deserialize, + ) + return self._stubs["get_intercept_endpoint_group_association"] + + @property + def create_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.CreateInterceptEndpointGroupAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create intercept endpoint + group association method over gRPC. + + Creates an association in a given project and + location. See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateInterceptEndpointGroupAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_intercept_endpoint_group_association" not in self._stubs: + self._stubs[ + "create_intercept_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/CreateInterceptEndpointGroupAssociation", + request_serializer=intercept.CreateInterceptEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_intercept_endpoint_group_association"] + + @property + def update_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.UpdateInterceptEndpointGroupAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update intercept endpoint + group association method over gRPC. + + Updates an association. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateInterceptEndpointGroupAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_intercept_endpoint_group_association" not in self._stubs: + self._stubs[ + "update_intercept_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/UpdateInterceptEndpointGroupAssociation", + request_serializer=intercept.UpdateInterceptEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_intercept_endpoint_group_association"] + + @property + def delete_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.DeleteInterceptEndpointGroupAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete intercept endpoint + group association method over gRPC. + + Deletes an association. + See https://google.aip.dev/135. 
+ + Returns: + Callable[[~.DeleteInterceptEndpointGroupAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_intercept_endpoint_group_association" not in self._stubs: + self._stubs[ + "delete_intercept_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/DeleteInterceptEndpointGroupAssociation", + request_serializer=intercept.DeleteInterceptEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_intercept_endpoint_group_association"] + + @property + def list_intercept_deployment_groups( + self, + ) -> Callable[ + [intercept.ListInterceptDeploymentGroupsRequest], + Awaitable[intercept.ListInterceptDeploymentGroupsResponse], + ]: + r"""Return a callable for the list intercept deployment + groups method over gRPC. + + Lists deployment groups in a given project and + location. See https://google.aip.dev/132. + + Returns: + Callable[[~.ListInterceptDeploymentGroupsRequest], + Awaitable[~.ListInterceptDeploymentGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_intercept_deployment_groups" not in self._stubs: + self._stubs[ + "list_intercept_deployment_groups" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/ListInterceptDeploymentGroups", + request_serializer=intercept.ListInterceptDeploymentGroupsRequest.serialize, + response_deserializer=intercept.ListInterceptDeploymentGroupsResponse.deserialize, + ) + return self._stubs["list_intercept_deployment_groups"] + + @property + def get_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.GetInterceptDeploymentGroupRequest], + Awaitable[intercept.InterceptDeploymentGroup], + ]: + r"""Return a callable for the get intercept deployment group method over gRPC. + + Gets a specific deployment group. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetInterceptDeploymentGroupRequest], + Awaitable[~.InterceptDeploymentGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_intercept_deployment_group" not in self._stubs: + self._stubs[ + "get_intercept_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/GetInterceptDeploymentGroup", + request_serializer=intercept.GetInterceptDeploymentGroupRequest.serialize, + response_deserializer=intercept.InterceptDeploymentGroup.deserialize, + ) + return self._stubs["get_intercept_deployment_group"] + + @property + def create_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.CreateInterceptDeploymentGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create intercept deployment + group method over gRPC. 
+ + Creates a deployment group in a given project and + location. See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateInterceptDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_intercept_deployment_group" not in self._stubs: + self._stubs[ + "create_intercept_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/CreateInterceptDeploymentGroup", + request_serializer=intercept.CreateInterceptDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_intercept_deployment_group"] + + @property + def update_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.UpdateInterceptDeploymentGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update intercept deployment + group method over gRPC. + + Updates a deployment group. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateInterceptDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_intercept_deployment_group" not in self._stubs: + self._stubs[ + "update_intercept_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/UpdateInterceptDeploymentGroup", + request_serializer=intercept.UpdateInterceptDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_intercept_deployment_group"] + + @property + def delete_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.DeleteInterceptDeploymentGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete intercept deployment + group method over gRPC. + + Deletes a deployment group. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteInterceptDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_intercept_deployment_group" not in self._stubs: + self._stubs[ + "delete_intercept_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/DeleteInterceptDeploymentGroup", + request_serializer=intercept.DeleteInterceptDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_intercept_deployment_group"] + + @property + def list_intercept_deployments( + self, + ) -> Callable[ + [intercept.ListInterceptDeploymentsRequest], + Awaitable[intercept.ListInterceptDeploymentsResponse], + ]: + r"""Return a callable for the list intercept deployments method over gRPC. + + Lists deployments in a given project and location. + See https://google.aip.dev/132. 
+ + Returns: + Callable[[~.ListInterceptDeploymentsRequest], + Awaitable[~.ListInterceptDeploymentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_intercept_deployments" not in self._stubs: + self._stubs[ + "list_intercept_deployments" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/ListInterceptDeployments", + request_serializer=intercept.ListInterceptDeploymentsRequest.serialize, + response_deserializer=intercept.ListInterceptDeploymentsResponse.deserialize, + ) + return self._stubs["list_intercept_deployments"] + + @property + def get_intercept_deployment( + self, + ) -> Callable[ + [intercept.GetInterceptDeploymentRequest], + Awaitable[intercept.InterceptDeployment], + ]: + r"""Return a callable for the get intercept deployment method over gRPC. + + Gets a specific deployment. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetInterceptDeploymentRequest], + Awaitable[~.InterceptDeployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_intercept_deployment" not in self._stubs: + self._stubs["get_intercept_deployment"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/GetInterceptDeployment", + request_serializer=intercept.GetInterceptDeploymentRequest.serialize, + response_deserializer=intercept.InterceptDeployment.deserialize, + ) + return self._stubs["get_intercept_deployment"] + + @property + def create_intercept_deployment( + self, + ) -> Callable[ + [intercept.CreateInterceptDeploymentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create intercept deployment method over gRPC. + + Creates a deployment in a given project and location. + See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateInterceptDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_intercept_deployment" not in self._stubs: + self._stubs[ + "create_intercept_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/CreateInterceptDeployment", + request_serializer=intercept.CreateInterceptDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_intercept_deployment"] + + @property + def update_intercept_deployment( + self, + ) -> Callable[ + [intercept.UpdateInterceptDeploymentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update intercept deployment method over gRPC. + + Updates a deployment. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateInterceptDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_intercept_deployment" not in self._stubs: + self._stubs[ + "update_intercept_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/UpdateInterceptDeployment", + request_serializer=intercept.UpdateInterceptDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_intercept_deployment"] + + @property + def delete_intercept_deployment( + self, + ) -> Callable[ + [intercept.DeleteInterceptDeploymentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete intercept deployment method over gRPC. + + Deletes a deployment. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteInterceptDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_intercept_deployment" not in self._stubs: + self._stubs[ + "delete_intercept_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Intercept/DeleteInterceptDeployment", + request_serializer=intercept.DeleteInterceptDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_intercept_deployment"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_intercept_endpoint_groups: self._wrap_method( + self.list_intercept_endpoint_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_intercept_endpoint_group: self._wrap_method( + self.get_intercept_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.create_intercept_endpoint_group: self._wrap_method( + self.create_intercept_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.update_intercept_endpoint_group: self._wrap_method( + self.update_intercept_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_intercept_endpoint_group: self._wrap_method( + self.delete_intercept_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.list_intercept_endpoint_group_associations: self._wrap_method( + self.list_intercept_endpoint_group_associations, + default_timeout=None, + client_info=client_info, + ), + self.get_intercept_endpoint_group_association: self._wrap_method( + self.get_intercept_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.create_intercept_endpoint_group_association: self._wrap_method( + self.create_intercept_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.update_intercept_endpoint_group_association: self._wrap_method( + self.update_intercept_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.delete_intercept_endpoint_group_association: self._wrap_method( + self.delete_intercept_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + 
self.list_intercept_deployment_groups: self._wrap_method( + self.list_intercept_deployment_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_intercept_deployment_group: self._wrap_method( + self.get_intercept_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.create_intercept_deployment_group: self._wrap_method( + self.create_intercept_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.update_intercept_deployment_group: self._wrap_method( + self.update_intercept_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_intercept_deployment_group: self._wrap_method( + self.delete_intercept_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.list_intercept_deployments: self._wrap_method( + self.list_intercept_deployments, + default_timeout=None, + client_info=client_info, + ), + self.get_intercept_deployment: self._wrap_method( + self.get_intercept_deployment, + default_timeout=None, + client_info=client_info, + ), + self.create_intercept_deployment: self._wrap_method( + self.create_intercept_deployment, + default_timeout=None, + client_info=client_info, + ), + self.update_intercept_deployment: self._wrap_method( + self.update_intercept_deployment, + default_timeout=None, + client_info=client_info, + ), + self.delete_intercept_deployment: self._wrap_method( + self.delete_intercept_deployment, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
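+        # Hedged usage sketch (``operation_name`` is a placeholder): the
+        # Operations mixin callables accept the raw protobuf request types,
+        # e.g.
+        #
+        #     await transport.delete_operation(
+        #         operations_pb2.DeleteOperationRequest(name=operation_name)
+        #     )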
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the IAM access control policy on the specified
+        resource. Replaces any existing policy.
+
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+
+        Gets the IAM access control policy for a resource.
+        Returns an empty policy if the resource exists and does
+        not have a policy set.
+
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+
+        Tests the specified permissions against the IAM access control
+        policy for a resource. If the resource does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
+
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                ~.TestIamPermissionsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
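+        # Hedged usage sketch (the resource variable and permission string are
+        # illustrative only): the callable accepts the standard IAM request
+        # message, e.g.
+        #
+        #     request = iam_policy_pb2.TestIamPermissionsRequest(
+        #         resource=resource_name,
+        #         permissions=["networksecurity.interceptDeployments.get"],
+        #     )
+        #     response = await transport.test_iam_permissions(request)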
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("InterceptGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/rest.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/rest.py new file mode 100644 index 000000000000..2dc3a9383234 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/rest.py @@ -0,0 +1,6273 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.network_security_v1alpha1.types import intercept + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseInterceptRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class InterceptRestInterceptor: + """Interceptor for Intercept. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InterceptRestTransport. + + .. code-block:: python + class MyCustomInterceptInterceptor(InterceptRestInterceptor): + def pre_create_intercept_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_intercept_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_intercept_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_intercept_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_intercept_endpoint_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_intercept_endpoint_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_intercept_endpoint_group_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_intercept_endpoint_group_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_intercept_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_intercept_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_intercept_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_intercept_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_intercept_endpoint_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_intercept_endpoint_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_intercept_endpoint_group_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_intercept_endpoint_group_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_intercept_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_intercept_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_intercept_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_intercept_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_intercept_endpoint_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_intercept_endpoint_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_intercept_endpoint_group_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_get_intercept_endpoint_group_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_intercept_deployment_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_intercept_deployment_groups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_intercept_deployments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_intercept_deployments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_intercept_endpoint_group_associations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_intercept_endpoint_group_associations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_intercept_endpoint_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_intercept_endpoint_groups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_intercept_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_intercept_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_intercept_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_intercept_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_intercept_endpoint_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_intercept_endpoint_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_intercept_endpoint_group_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_intercept_endpoint_group_association(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InterceptRestTransport(interceptor=MyCustomInterceptInterceptor()) + client = InterceptClient(transport=transport) + + + """ + + def pre_create_intercept_deployment( + self, + request: intercept.CreateInterceptDeploymentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.CreateInterceptDeploymentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_intercept_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_create_intercept_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_intercept_deployment + + DEPRECATED. Please use the `post_create_intercept_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_create_intercept_deployment` interceptor runs + before the `post_create_intercept_deployment_with_metadata` interceptor. 
+ """ + return response + + def post_create_intercept_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_intercept_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_create_intercept_deployment_with_metadata` + interceptor in new development instead of the `post_create_intercept_deployment` interceptor. + When both interceptors are used, this `post_create_intercept_deployment_with_metadata` interceptor runs after the + `post_create_intercept_deployment` interceptor. The (possibly modified) response returned by + `post_create_intercept_deployment` will be passed to + `post_create_intercept_deployment_with_metadata`. + """ + return response, metadata + + def pre_create_intercept_deployment_group( + self, + request: intercept.CreateInterceptDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.CreateInterceptDeploymentGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_intercept_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_create_intercept_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_intercept_deployment_group + + DEPRECATED. Please use the `post_create_intercept_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_create_intercept_deployment_group` interceptor runs + before the `post_create_intercept_deployment_group_with_metadata` interceptor. + """ + return response + + def post_create_intercept_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_intercept_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_create_intercept_deployment_group_with_metadata` + interceptor in new development instead of the `post_create_intercept_deployment_group` interceptor. + When both interceptors are used, this `post_create_intercept_deployment_group_with_metadata` interceptor runs after the + `post_create_intercept_deployment_group` interceptor. The (possibly modified) response returned by + `post_create_intercept_deployment_group` will be passed to + `post_create_intercept_deployment_group_with_metadata`. 
+ """ + return response, metadata + + def pre_create_intercept_endpoint_group( + self, + request: intercept.CreateInterceptEndpointGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.CreateInterceptEndpointGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_intercept_endpoint_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_create_intercept_endpoint_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_intercept_endpoint_group + + DEPRECATED. Please use the `post_create_intercept_endpoint_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_create_intercept_endpoint_group` interceptor runs + before the `post_create_intercept_endpoint_group_with_metadata` interceptor. + """ + return response + + def post_create_intercept_endpoint_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_intercept_endpoint_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_create_intercept_endpoint_group_with_metadata` + interceptor in new development instead of the `post_create_intercept_endpoint_group` interceptor. + When both interceptors are used, this `post_create_intercept_endpoint_group_with_metadata` interceptor runs after the + `post_create_intercept_endpoint_group` interceptor. The (possibly modified) response returned by + `post_create_intercept_endpoint_group` will be passed to + `post_create_intercept_endpoint_group_with_metadata`. + """ + return response, metadata + + def pre_create_intercept_endpoint_group_association( + self, + request: intercept.CreateInterceptEndpointGroupAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.CreateInterceptEndpointGroupAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_intercept_endpoint_group_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_create_intercept_endpoint_group_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_intercept_endpoint_group_association + + DEPRECATED. Please use the `post_create_intercept_endpoint_group_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_create_intercept_endpoint_group_association` interceptor runs + before the `post_create_intercept_endpoint_group_association_with_metadata` interceptor. 
+ """ + return response + + def post_create_intercept_endpoint_group_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_intercept_endpoint_group_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_create_intercept_endpoint_group_association_with_metadata` + interceptor in new development instead of the `post_create_intercept_endpoint_group_association` interceptor. + When both interceptors are used, this `post_create_intercept_endpoint_group_association_with_metadata` interceptor runs after the + `post_create_intercept_endpoint_group_association` interceptor. The (possibly modified) response returned by + `post_create_intercept_endpoint_group_association` will be passed to + `post_create_intercept_endpoint_group_association_with_metadata`. + """ + return response, metadata + + def pre_delete_intercept_deployment( + self, + request: intercept.DeleteInterceptDeploymentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.DeleteInterceptDeploymentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_intercept_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_delete_intercept_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_intercept_deployment + + DEPRECATED. Please use the `post_delete_intercept_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_delete_intercept_deployment` interceptor runs + before the `post_delete_intercept_deployment_with_metadata` interceptor. + """ + return response + + def post_delete_intercept_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_intercept_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_delete_intercept_deployment_with_metadata` + interceptor in new development instead of the `post_delete_intercept_deployment` interceptor. + When both interceptors are used, this `post_delete_intercept_deployment_with_metadata` interceptor runs after the + `post_delete_intercept_deployment` interceptor. The (possibly modified) response returned by + `post_delete_intercept_deployment` will be passed to + `post_delete_intercept_deployment_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_intercept_deployment_group( + self, + request: intercept.DeleteInterceptDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.DeleteInterceptDeploymentGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_intercept_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_delete_intercept_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_intercept_deployment_group + + DEPRECATED. Please use the `post_delete_intercept_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_delete_intercept_deployment_group` interceptor runs + before the `post_delete_intercept_deployment_group_with_metadata` interceptor. + """ + return response + + def post_delete_intercept_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_intercept_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_delete_intercept_deployment_group_with_metadata` + interceptor in new development instead of the `post_delete_intercept_deployment_group` interceptor. + When both interceptors are used, this `post_delete_intercept_deployment_group_with_metadata` interceptor runs after the + `post_delete_intercept_deployment_group` interceptor. The (possibly modified) response returned by + `post_delete_intercept_deployment_group` will be passed to + `post_delete_intercept_deployment_group_with_metadata`. + """ + return response, metadata + + def pre_delete_intercept_endpoint_group( + self, + request: intercept.DeleteInterceptEndpointGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.DeleteInterceptEndpointGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_intercept_endpoint_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_delete_intercept_endpoint_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_intercept_endpoint_group + + DEPRECATED. Please use the `post_delete_intercept_endpoint_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_delete_intercept_endpoint_group` interceptor runs + before the `post_delete_intercept_endpoint_group_with_metadata` interceptor. 
+ """ + return response + + def post_delete_intercept_endpoint_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_intercept_endpoint_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_delete_intercept_endpoint_group_with_metadata` + interceptor in new development instead of the `post_delete_intercept_endpoint_group` interceptor. + When both interceptors are used, this `post_delete_intercept_endpoint_group_with_metadata` interceptor runs after the + `post_delete_intercept_endpoint_group` interceptor. The (possibly modified) response returned by + `post_delete_intercept_endpoint_group` will be passed to + `post_delete_intercept_endpoint_group_with_metadata`. + """ + return response, metadata + + def pre_delete_intercept_endpoint_group_association( + self, + request: intercept.DeleteInterceptEndpointGroupAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.DeleteInterceptEndpointGroupAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_intercept_endpoint_group_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_delete_intercept_endpoint_group_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_intercept_endpoint_group_association + + DEPRECATED. Please use the `post_delete_intercept_endpoint_group_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_delete_intercept_endpoint_group_association` interceptor runs + before the `post_delete_intercept_endpoint_group_association_with_metadata` interceptor. + """ + return response + + def post_delete_intercept_endpoint_group_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_intercept_endpoint_group_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_delete_intercept_endpoint_group_association_with_metadata` + interceptor in new development instead of the `post_delete_intercept_endpoint_group_association` interceptor. + When both interceptors are used, this `post_delete_intercept_endpoint_group_association_with_metadata` interceptor runs after the + `post_delete_intercept_endpoint_group_association` interceptor. The (possibly modified) response returned by + `post_delete_intercept_endpoint_group_association` will be passed to + `post_delete_intercept_endpoint_group_association_with_metadata`. 
+ """ + return response, metadata + + def pre_get_intercept_deployment( + self, + request: intercept.GetInterceptDeploymentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.GetInterceptDeploymentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_intercept_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_get_intercept_deployment( + self, response: intercept.InterceptDeployment + ) -> intercept.InterceptDeployment: + """Post-rpc interceptor for get_intercept_deployment + + DEPRECATED. Please use the `post_get_intercept_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_get_intercept_deployment` interceptor runs + before the `post_get_intercept_deployment_with_metadata` interceptor. + """ + return response + + def post_get_intercept_deployment_with_metadata( + self, + response: intercept.InterceptDeployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[intercept.InterceptDeployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_intercept_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_get_intercept_deployment_with_metadata` + interceptor in new development instead of the `post_get_intercept_deployment` interceptor. + When both interceptors are used, this `post_get_intercept_deployment_with_metadata` interceptor runs after the + `post_get_intercept_deployment` interceptor. The (possibly modified) response returned by + `post_get_intercept_deployment` will be passed to + `post_get_intercept_deployment_with_metadata`. + """ + return response, metadata + + def pre_get_intercept_deployment_group( + self, + request: intercept.GetInterceptDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.GetInterceptDeploymentGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_intercept_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_get_intercept_deployment_group( + self, response: intercept.InterceptDeploymentGroup + ) -> intercept.InterceptDeploymentGroup: + """Post-rpc interceptor for get_intercept_deployment_group + + DEPRECATED. Please use the `post_get_intercept_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_get_intercept_deployment_group` interceptor runs + before the `post_get_intercept_deployment_group_with_metadata` interceptor. 
+ """ + return response + + def post_get_intercept_deployment_group_with_metadata( + self, + response: intercept.InterceptDeploymentGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.InterceptDeploymentGroup, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_intercept_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_get_intercept_deployment_group_with_metadata` + interceptor in new development instead of the `post_get_intercept_deployment_group` interceptor. + When both interceptors are used, this `post_get_intercept_deployment_group_with_metadata` interceptor runs after the + `post_get_intercept_deployment_group` interceptor. The (possibly modified) response returned by + `post_get_intercept_deployment_group` will be passed to + `post_get_intercept_deployment_group_with_metadata`. + """ + return response, metadata + + def pre_get_intercept_endpoint_group( + self, + request: intercept.GetInterceptEndpointGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.GetInterceptEndpointGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_intercept_endpoint_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_get_intercept_endpoint_group( + self, response: intercept.InterceptEndpointGroup + ) -> intercept.InterceptEndpointGroup: + """Post-rpc interceptor for get_intercept_endpoint_group + + DEPRECATED. Please use the `post_get_intercept_endpoint_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_get_intercept_endpoint_group` interceptor runs + before the `post_get_intercept_endpoint_group_with_metadata` interceptor. + """ + return response + + def post_get_intercept_endpoint_group_with_metadata( + self, + response: intercept.InterceptEndpointGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.InterceptEndpointGroup, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_intercept_endpoint_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_get_intercept_endpoint_group_with_metadata` + interceptor in new development instead of the `post_get_intercept_endpoint_group` interceptor. + When both interceptors are used, this `post_get_intercept_endpoint_group_with_metadata` interceptor runs after the + `post_get_intercept_endpoint_group` interceptor. The (possibly modified) response returned by + `post_get_intercept_endpoint_group` will be passed to + `post_get_intercept_endpoint_group_with_metadata`. 
+ """ + return response, metadata + + def pre_get_intercept_endpoint_group_association( + self, + request: intercept.GetInterceptEndpointGroupAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.GetInterceptEndpointGroupAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_intercept_endpoint_group_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_get_intercept_endpoint_group_association( + self, response: intercept.InterceptEndpointGroupAssociation + ) -> intercept.InterceptEndpointGroupAssociation: + """Post-rpc interceptor for get_intercept_endpoint_group_association + + DEPRECATED. Please use the `post_get_intercept_endpoint_group_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_get_intercept_endpoint_group_association` interceptor runs + before the `post_get_intercept_endpoint_group_association_with_metadata` interceptor. + """ + return response + + def post_get_intercept_endpoint_group_association_with_metadata( + self, + response: intercept.InterceptEndpointGroupAssociation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.InterceptEndpointGroupAssociation, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_intercept_endpoint_group_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_get_intercept_endpoint_group_association_with_metadata` + interceptor in new development instead of the `post_get_intercept_endpoint_group_association` interceptor. + When both interceptors are used, this `post_get_intercept_endpoint_group_association_with_metadata` interceptor runs after the + `post_get_intercept_endpoint_group_association` interceptor. The (possibly modified) response returned by + `post_get_intercept_endpoint_group_association` will be passed to + `post_get_intercept_endpoint_group_association_with_metadata`. + """ + return response, metadata + + def pre_list_intercept_deployment_groups( + self, + request: intercept.ListInterceptDeploymentGroupsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.ListInterceptDeploymentGroupsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_intercept_deployment_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_list_intercept_deployment_groups( + self, response: intercept.ListInterceptDeploymentGroupsResponse + ) -> intercept.ListInterceptDeploymentGroupsResponse: + """Post-rpc interceptor for list_intercept_deployment_groups + + DEPRECATED. Please use the `post_list_intercept_deployment_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_list_intercept_deployment_groups` interceptor runs + before the `post_list_intercept_deployment_groups_with_metadata` interceptor. 
+ """ + return response + + def post_list_intercept_deployment_groups_with_metadata( + self, + response: intercept.ListInterceptDeploymentGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.ListInterceptDeploymentGroupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_intercept_deployment_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_list_intercept_deployment_groups_with_metadata` + interceptor in new development instead of the `post_list_intercept_deployment_groups` interceptor. + When both interceptors are used, this `post_list_intercept_deployment_groups_with_metadata` interceptor runs after the + `post_list_intercept_deployment_groups` interceptor. The (possibly modified) response returned by + `post_list_intercept_deployment_groups` will be passed to + `post_list_intercept_deployment_groups_with_metadata`. + """ + return response, metadata + + def pre_list_intercept_deployments( + self, + request: intercept.ListInterceptDeploymentsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.ListInterceptDeploymentsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_intercept_deployments + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_list_intercept_deployments( + self, response: intercept.ListInterceptDeploymentsResponse + ) -> intercept.ListInterceptDeploymentsResponse: + """Post-rpc interceptor for list_intercept_deployments + + DEPRECATED. Please use the `post_list_intercept_deployments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_list_intercept_deployments` interceptor runs + before the `post_list_intercept_deployments_with_metadata` interceptor. + """ + return response + + def post_list_intercept_deployments_with_metadata( + self, + response: intercept.ListInterceptDeploymentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.ListInterceptDeploymentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_intercept_deployments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_list_intercept_deployments_with_metadata` + interceptor in new development instead of the `post_list_intercept_deployments` interceptor. + When both interceptors are used, this `post_list_intercept_deployments_with_metadata` interceptor runs after the + `post_list_intercept_deployments` interceptor. The (possibly modified) response returned by + `post_list_intercept_deployments` will be passed to + `post_list_intercept_deployments_with_metadata`. 
+ """ + return response, metadata + + def pre_list_intercept_endpoint_group_associations( + self, + request: intercept.ListInterceptEndpointGroupAssociationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.ListInterceptEndpointGroupAssociationsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_intercept_endpoint_group_associations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_list_intercept_endpoint_group_associations( + self, response: intercept.ListInterceptEndpointGroupAssociationsResponse + ) -> intercept.ListInterceptEndpointGroupAssociationsResponse: + """Post-rpc interceptor for list_intercept_endpoint_group_associations + + DEPRECATED. Please use the `post_list_intercept_endpoint_group_associations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_list_intercept_endpoint_group_associations` interceptor runs + before the `post_list_intercept_endpoint_group_associations_with_metadata` interceptor. + """ + return response + + def post_list_intercept_endpoint_group_associations_with_metadata( + self, + response: intercept.ListInterceptEndpointGroupAssociationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.ListInterceptEndpointGroupAssociationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_intercept_endpoint_group_associations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_list_intercept_endpoint_group_associations_with_metadata` + interceptor in new development instead of the `post_list_intercept_endpoint_group_associations` interceptor. + When both interceptors are used, this `post_list_intercept_endpoint_group_associations_with_metadata` interceptor runs after the + `post_list_intercept_endpoint_group_associations` interceptor. The (possibly modified) response returned by + `post_list_intercept_endpoint_group_associations` will be passed to + `post_list_intercept_endpoint_group_associations_with_metadata`. + """ + return response, metadata + + def pre_list_intercept_endpoint_groups( + self, + request: intercept.ListInterceptEndpointGroupsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.ListInterceptEndpointGroupsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_intercept_endpoint_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_list_intercept_endpoint_groups( + self, response: intercept.ListInterceptEndpointGroupsResponse + ) -> intercept.ListInterceptEndpointGroupsResponse: + """Post-rpc interceptor for list_intercept_endpoint_groups + + DEPRECATED. Please use the `post_list_intercept_endpoint_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. 
This `post_list_intercept_endpoint_groups` interceptor runs + before the `post_list_intercept_endpoint_groups_with_metadata` interceptor. + """ + return response + + def post_list_intercept_endpoint_groups_with_metadata( + self, + response: intercept.ListInterceptEndpointGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.ListInterceptEndpointGroupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_intercept_endpoint_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_list_intercept_endpoint_groups_with_metadata` + interceptor in new development instead of the `post_list_intercept_endpoint_groups` interceptor. + When both interceptors are used, this `post_list_intercept_endpoint_groups_with_metadata` interceptor runs after the + `post_list_intercept_endpoint_groups` interceptor. The (possibly modified) response returned by + `post_list_intercept_endpoint_groups` will be passed to + `post_list_intercept_endpoint_groups_with_metadata`. + """ + return response, metadata + + def pre_update_intercept_deployment( + self, + request: intercept.UpdateInterceptDeploymentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.UpdateInterceptDeploymentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_intercept_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_update_intercept_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_intercept_deployment + + DEPRECATED. Please use the `post_update_intercept_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_update_intercept_deployment` interceptor runs + before the `post_update_intercept_deployment_with_metadata` interceptor. + """ + return response + + def post_update_intercept_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_intercept_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_update_intercept_deployment_with_metadata` + interceptor in new development instead of the `post_update_intercept_deployment` interceptor. + When both interceptors are used, this `post_update_intercept_deployment_with_metadata` interceptor runs after the + `post_update_intercept_deployment` interceptor. The (possibly modified) response returned by + `post_update_intercept_deployment` will be passed to + `post_update_intercept_deployment_with_metadata`. 
+ """ + return response, metadata + + def pre_update_intercept_deployment_group( + self, + request: intercept.UpdateInterceptDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.UpdateInterceptDeploymentGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_intercept_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_update_intercept_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_intercept_deployment_group + + DEPRECATED. Please use the `post_update_intercept_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_update_intercept_deployment_group` interceptor runs + before the `post_update_intercept_deployment_group_with_metadata` interceptor. + """ + return response + + def post_update_intercept_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_intercept_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_update_intercept_deployment_group_with_metadata` + interceptor in new development instead of the `post_update_intercept_deployment_group` interceptor. + When both interceptors are used, this `post_update_intercept_deployment_group_with_metadata` interceptor runs after the + `post_update_intercept_deployment_group` interceptor. The (possibly modified) response returned by + `post_update_intercept_deployment_group` will be passed to + `post_update_intercept_deployment_group_with_metadata`. + """ + return response, metadata + + def pre_update_intercept_endpoint_group( + self, + request: intercept.UpdateInterceptEndpointGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.UpdateInterceptEndpointGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_intercept_endpoint_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_update_intercept_endpoint_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_intercept_endpoint_group + + DEPRECATED. Please use the `post_update_intercept_endpoint_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_update_intercept_endpoint_group` interceptor runs + before the `post_update_intercept_endpoint_group_with_metadata` interceptor. 
+ """ + return response + + def post_update_intercept_endpoint_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_intercept_endpoint_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_update_intercept_endpoint_group_with_metadata` + interceptor in new development instead of the `post_update_intercept_endpoint_group` interceptor. + When both interceptors are used, this `post_update_intercept_endpoint_group_with_metadata` interceptor runs after the + `post_update_intercept_endpoint_group` interceptor. The (possibly modified) response returned by + `post_update_intercept_endpoint_group` will be passed to + `post_update_intercept_endpoint_group_with_metadata`. + """ + return response, metadata + + def pre_update_intercept_endpoint_group_association( + self, + request: intercept.UpdateInterceptEndpointGroupAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + intercept.UpdateInterceptEndpointGroupAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_intercept_endpoint_group_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_update_intercept_endpoint_group_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_intercept_endpoint_group_association + + DEPRECATED. Please use the `post_update_intercept_endpoint_group_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. This `post_update_intercept_endpoint_group_association` interceptor runs + before the `post_update_intercept_endpoint_group_association_with_metadata` interceptor. + """ + return response + + def post_update_intercept_endpoint_group_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_intercept_endpoint_group_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intercept server but before it is returned to user code. + + We recommend only using this `post_update_intercept_endpoint_group_association_with_metadata` + interceptor in new development instead of the `post_update_intercept_endpoint_group_association` interceptor. + When both interceptors are used, this `post_update_intercept_endpoint_group_association_with_metadata` interceptor runs after the + `post_update_intercept_endpoint_group_association` interceptor. The (possibly modified) response returned by + `post_update_intercept_endpoint_group_association` will be passed to + `post_update_intercept_endpoint_group_association_with_metadata`. 
+ """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. 
+ """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intercept server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Intercept server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class InterceptRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InterceptRestInterceptor + + +class InterceptRestTransport(_BaseInterceptRestTransport): + """REST backend synchronous transport for Intercept. + + Service for Third-Party Packet Intercept (TPPI). + TPPI is the "in-band" flavor of the Network Security + Integrations product. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[InterceptRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InterceptRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
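+        # Illustrative sketch (comment only): the cached operations client can be
+        # used to poll a long-running operation started through this transport.
+        # The operation name below is hypothetical:
+        #
+        #     op = transport.operations_client.get_operation(
+        #         name="projects/my-project/locations/global/operations/operation-123"
+        #     )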
+ return self._operations_client + + class _CreateInterceptDeployment( + _BaseInterceptRestTransport._BaseCreateInterceptDeployment, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.CreateInterceptDeployment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: intercept.CreateInterceptDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create intercept + deployment method over HTTP. + + Args: + request (~.intercept.CreateInterceptDeploymentRequest): + The request object. Request message for + CreateInterceptDeployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseInterceptRestTransport._BaseCreateInterceptDeployment._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_intercept_deployment( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseCreateInterceptDeployment._get_transcoded_request( + http_options, request + ) + + body = _BaseInterceptRestTransport._BaseCreateInterceptDeployment._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseCreateInterceptDeployment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.CreateInterceptDeployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "CreateInterceptDeployment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._CreateInterceptDeployment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
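+            # (Descriptive note: core_exceptions.from_http_response reads the HTTP
+            # status code and error payload and returns the matching typed exception,
+            # e.g. a 404 response becomes core_exceptions.NotFound.)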
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_intercept_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_intercept_deployment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.create_intercept_deployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "CreateInterceptDeployment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateInterceptDeploymentGroup( + _BaseInterceptRestTransport._BaseCreateInterceptDeploymentGroup, + InterceptRestStub, + ): + def __hash__(self): + return hash("InterceptRestTransport.CreateInterceptDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: intercept.CreateInterceptDeploymentGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create intercept + deployment group method over HTTP. + + Args: + request (~.intercept.CreateInterceptDeploymentGroupRequest): + The request object. Request message for + CreateInterceptDeploymentGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseInterceptRestTransport._BaseCreateInterceptDeploymentGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_intercept_deployment_group( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseCreateInterceptDeploymentGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseInterceptRestTransport._BaseCreateInterceptDeploymentGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseCreateInterceptDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.CreateInterceptDeploymentGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "CreateInterceptDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + InterceptRestTransport._CreateInterceptDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_intercept_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_intercept_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.create_intercept_deployment_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "CreateInterceptDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateInterceptEndpointGroup( + _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroup, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.CreateInterceptEndpointGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + 
headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: intercept.CreateInterceptEndpointGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create intercept endpoint + group method over HTTP. + + Args: + request (~.intercept.CreateInterceptEndpointGroupRequest): + The request object. Request message for + CreateInterceptEndpointGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_intercept_endpoint_group( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.CreateInterceptEndpointGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "CreateInterceptEndpointGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + InterceptRestTransport._CreateInterceptEndpointGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
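+ # NOTE (illustrative comment, not generator output): the status check below
+ # hands any non-2xx response to google.api_core.exceptions.from_http_response,
+ # which raises the matching GoogleAPICallError subclass (for example
+ # 404 -> NotFound, 403 -> Forbidden). Callers of the generated surface would
+ # normally catch those classes rather than inspect status codes; a minimal
+ # sketch, assuming an InterceptClient built from this package:
+ #
+ #   from google.api_core import exceptions as gexc
+ #   try:
+ #       client.create_intercept_endpoint_group(request=request)
+ #   except gexc.GoogleAPICallError as err:
+ #       print(err.code, err.message)  # mapped HTTP status and server message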
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_intercept_endpoint_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_intercept_endpoint_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.create_intercept_endpoint_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "CreateInterceptEndpointGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateInterceptEndpointGroupAssociation( + _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroupAssociation, + InterceptRestStub, + ): + def __hash__(self): + return hash( + "InterceptRestTransport.CreateInterceptEndpointGroupAssociation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: intercept.CreateInterceptEndpointGroupAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create intercept endpoint + group association method over HTTP. + + Args: + request (~.intercept.CreateInterceptEndpointGroupAssociationRequest): + The request object. Request message for + CreateInterceptEndpointGroupAssociation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroupAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_create_intercept_endpoint_group_association( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroupAssociation._get_transcoded_request( + http_options, request + ) + + body = _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroupAssociation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroupAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.CreateInterceptEndpointGroupAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "CreateInterceptEndpointGroupAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._CreateInterceptEndpointGroupAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
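+ # NOTE (illustrative comment, not generator output): the Create* and Update*
+ # stubs in this file, like the call above, serialize the resource into a JSON
+ # request body (_get_request_body_json -> data=body), while the Get/List/
+ # Delete stubs further down send only a URI plus query parameters. The dict
+ # produced by the transcoding step is roughly shaped like this (all field
+ # values hypothetical):
+ #
+ #   transcoded_request = {
+ #       "method": "post",
+ #       "uri": "/v1alpha1/.../interceptEndpointGroupAssociations",
+ #       "body": {...},          # the resource message rendered as JSON
+ #       "query_params": {...},  # remaining request fields, camelCased
+ #   }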
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_intercept_endpoint_group_association( + resp + ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_intercept_endpoint_group_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.create_intercept_endpoint_group_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "CreateInterceptEndpointGroupAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteInterceptDeployment( + _BaseInterceptRestTransport._BaseDeleteInterceptDeployment, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.DeleteInterceptDeployment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: intercept.DeleteInterceptDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete intercept + deployment method over HTTP. + + Args: + request (~.intercept.DeleteInterceptDeploymentRequest): + The request object. Request message for + DeleteInterceptDeployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseInterceptRestTransport._BaseDeleteInterceptDeployment._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_intercept_deployment( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseDeleteInterceptDeployment._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseDeleteInterceptDeployment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.DeleteInterceptDeployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "DeleteInterceptDeployment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._DeleteInterceptDeployment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_intercept_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_intercept_deployment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.delete_intercept_deployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "DeleteInterceptDeployment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteInterceptDeploymentGroup( + _BaseInterceptRestTransport._BaseDeleteInterceptDeploymentGroup, + InterceptRestStub, + ): + def __hash__(self): + return hash("InterceptRestTransport.DeleteInterceptDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: intercept.DeleteInterceptDeploymentGroupRequest, + *, + retry: 
OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete intercept + deployment group method over HTTP. + + Args: + request (~.intercept.DeleteInterceptDeploymentGroupRequest): + The request object. Request message for + DeleteInterceptDeploymentGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseInterceptRestTransport._BaseDeleteInterceptDeploymentGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_intercept_deployment_group( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseDeleteInterceptDeploymentGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseDeleteInterceptDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.DeleteInterceptDeploymentGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "DeleteInterceptDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + InterceptRestTransport._DeleteInterceptDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
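+ # NOTE (illustrative comment, not generator output): deletion, like the other
+ # mutating RPCs in this transport, comes back as a raw operations_pb2.Operation
+ # rather than the final resource, because it is a long-running operation. The
+ # public client layer wraps that Operation so callers can block on it; a
+ # minimal sketch, assuming an InterceptClient built from this package:
+ #
+ #   request = intercept.DeleteInterceptDeploymentGroupRequest(name=name)
+ #   operation = client.delete_intercept_deployment_group(request=request)
+ #   operation.result()  # waits for the LRO to finish; raises on failure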
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_intercept_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_intercept_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.delete_intercept_deployment_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "DeleteInterceptDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteInterceptEndpointGroup( + _BaseInterceptRestTransport._BaseDeleteInterceptEndpointGroup, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.DeleteInterceptEndpointGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: intercept.DeleteInterceptEndpointGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete intercept endpoint + group method over HTTP. + + Args: + request (~.intercept.DeleteInterceptEndpointGroupRequest): + The request object. Request message for + DeleteInterceptEndpointGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseInterceptRestTransport._BaseDeleteInterceptEndpointGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_intercept_endpoint_group( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseDeleteInterceptEndpointGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseDeleteInterceptEndpointGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.DeleteInterceptEndpointGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "DeleteInterceptEndpointGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + InterceptRestTransport._DeleteInterceptEndpointGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_intercept_endpoint_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_intercept_endpoint_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.delete_intercept_endpoint_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "DeleteInterceptEndpointGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteInterceptEndpointGroupAssociation( + _BaseInterceptRestTransport._BaseDeleteInterceptEndpointGroupAssociation, + InterceptRestStub, + ): + def __hash__(self): + return hash( + "InterceptRestTransport.DeleteInterceptEndpointGroupAssociation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def 
__call__( + self, + request: intercept.DeleteInterceptEndpointGroupAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete intercept endpoint + group association method over HTTP. + + Args: + request (~.intercept.DeleteInterceptEndpointGroupAssociationRequest): + The request object. Request message for + DeleteInterceptEndpointGroupAssociation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseInterceptRestTransport._BaseDeleteInterceptEndpointGroupAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_delete_intercept_endpoint_group_association( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseDeleteInterceptEndpointGroupAssociation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseDeleteInterceptEndpointGroupAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.DeleteInterceptEndpointGroupAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "DeleteInterceptEndpointGroupAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._DeleteInterceptEndpointGroupAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
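+ # NOTE (illustrative comment, not generator output): the
+ # CLIENT_LOGGING_SUPPORTED blocks that bracket every call only run when the
+ # module logger is enabled for DEBUG, so the request/response dumps cost
+ # nothing by default. With a google-api-core version that makes
+ # CLIENT_LOGGING_SUPPORTED true, standard-library logging configuration is
+ # enough to switch them on (logger names assumed to follow the module path):
+ #
+ #   import logging
+ #   logging.basicConfig(level=logging.INFO)
+ #   logging.getLogger("google.cloud.network_security_v1alpha1").setLevel(
+ #       logging.DEBUG
+ #   )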
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_intercept_endpoint_group_association( + resp + ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_intercept_endpoint_group_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.delete_intercept_endpoint_group_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "DeleteInterceptEndpointGroupAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInterceptDeployment( + _BaseInterceptRestTransport._BaseGetInterceptDeployment, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.GetInterceptDeployment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: intercept.GetInterceptDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptDeployment: + r"""Call the get intercept deployment method over HTTP. + + Args: + request (~.intercept.GetInterceptDeploymentRequest): + The request object. Request message for + GetInterceptDeployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.intercept.InterceptDeployment: + A deployment represents a zonal + intercept backend ready to accept + GENEVE-encapsulated traffic, e.g. a + zonal instance group fronted by an + internal passthrough load balancer. + Deployments are always part of a global + deployment group which represents a + global intercept service. 
+ + """ + + http_options = ( + _BaseInterceptRestTransport._BaseGetInterceptDeployment._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_intercept_deployment( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseGetInterceptDeployment._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseGetInterceptDeployment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.GetInterceptDeployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetInterceptDeployment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._GetInterceptDeployment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = intercept.InterceptDeployment() + pb_resp = intercept.InterceptDeployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_intercept_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_intercept_deployment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = intercept.InterceptDeployment.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.get_intercept_deployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetInterceptDeployment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInterceptDeploymentGroup( + _BaseInterceptRestTransport._BaseGetInterceptDeploymentGroup, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.GetInterceptDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
intercept.GetInterceptDeploymentGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptDeploymentGroup: + r"""Call the get intercept deployment + group method over HTTP. + + Args: + request (~.intercept.GetInterceptDeploymentGroupRequest): + The request object. Request message for + GetInterceptDeploymentGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.intercept.InterceptDeploymentGroup: + A deployment group aggregates many + zonal intercept backends (deployments) + into a single global intercept service. + Consumers can connect this service using + an endpoint group. + + """ + + http_options = ( + _BaseInterceptRestTransport._BaseGetInterceptDeploymentGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_intercept_deployment_group( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseGetInterceptDeploymentGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseGetInterceptDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.GetInterceptDeploymentGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetInterceptDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + InterceptRestTransport._GetInterceptDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
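+ # NOTE (illustrative comment, not generator output): the Get*/List* stubs
+ # parse straight into the typed proto-plus message below: .pb(resp) exposes
+ # the wrapped protobuf so json_format.Parse can fill it in place, and the
+ # wrapper `resp` is what the interceptor hooks and the caller receive. The
+ # same round-trip in isolation (field name assumed for illustration):
+ #
+ #   msg = intercept.InterceptDeploymentGroup(name="example")
+ #   as_json = json_format.MessageToJson(intercept.InterceptDeploymentGroup.pb(msg))
+ #   parsed = intercept.InterceptDeploymentGroup()
+ #   json_format.Parse(as_json, intercept.InterceptDeploymentGroup.pb(parsed))
+ #   assert parsed.name == "example"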
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = intercept.InterceptDeploymentGroup() + pb_resp = intercept.InterceptDeploymentGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_intercept_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_intercept_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = intercept.InterceptDeploymentGroup.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.get_intercept_deployment_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetInterceptDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInterceptEndpointGroup( + _BaseInterceptRestTransport._BaseGetInterceptEndpointGroup, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.GetInterceptEndpointGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: intercept.GetInterceptEndpointGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptEndpointGroup: + r"""Call the get intercept endpoint + group method over HTTP. + + Args: + request (~.intercept.GetInterceptEndpointGroupRequest): + The request object. Request message for + GetInterceptEndpointGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.intercept.InterceptEndpointGroup: + An endpoint group is a consumer + frontend for a deployment group + (backend). In order to configure + intercept for a network, consumers must + create: + + - An association between their network + and the endpoint group. + - A security profile that points to the + endpoint group. + - A firewall rule that references the + security profile (group). 
+ + """ + + http_options = ( + _BaseInterceptRestTransport._BaseGetInterceptEndpointGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_intercept_endpoint_group( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseGetInterceptEndpointGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseGetInterceptEndpointGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.GetInterceptEndpointGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetInterceptEndpointGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._GetInterceptEndpointGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = intercept.InterceptEndpointGroup() + pb_resp = intercept.InterceptEndpointGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_intercept_endpoint_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_intercept_endpoint_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = intercept.InterceptEndpointGroup.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.get_intercept_endpoint_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetInterceptEndpointGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInterceptEndpointGroupAssociation( + _BaseInterceptRestTransport._BaseGetInterceptEndpointGroupAssociation, + InterceptRestStub, + ): + def __hash__(self): + return hash("InterceptRestTransport.GetInterceptEndpointGroupAssociation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return 
response + + def __call__( + self, + request: intercept.GetInterceptEndpointGroupAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.InterceptEndpointGroupAssociation: + r"""Call the get intercept endpoint + group association method over HTTP. + + Args: + request (~.intercept.GetInterceptEndpointGroupAssociationRequest): + The request object. Request message for + GetInterceptEndpointGroupAssociation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.intercept.InterceptEndpointGroupAssociation: + An endpoint group association + represents a link between a network and + an endpoint group in the organization. + + Creating an association creates the + networking infrastructure linking the + network to the endpoint group, but does + not enable intercept by itself. To + enable intercept, the user must also + create a network firewall policy + containing intercept rules and associate + it with the network. + + """ + + http_options = ( + _BaseInterceptRestTransport._BaseGetInterceptEndpointGroupAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_get_intercept_endpoint_group_association( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseGetInterceptEndpointGroupAssociation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseGetInterceptEndpointGroupAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.GetInterceptEndpointGroupAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetInterceptEndpointGroupAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._GetInterceptEndpointGroupAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
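+ # NOTE (illustrative comment, not generator output): every stub funnels its
+ # result through a pair of hooks, post_<rpc>(resp) and then
+ # post_<rpc>_with_metadata(resp, metadata), mirroring the pre_<rpc> hook that
+ # ran before the request went out. Overriding them is the supported way to
+ # observe or rewrite traffic without touching this transport; a minimal
+ # sketch, assuming the interceptor base class in this module is named
+ # InterceptRestInterceptor and that the transport accepts an `interceptor`
+ # argument:
+ #
+ #   class AuditingInterceptor(InterceptRestInterceptor):
+ #       def post_get_intercept_endpoint_group_association(self, response):
+ #           print("fetched:", response.name)
+ #           return response
+ #
+ #   transport = InterceptRestTransport(interceptor=AuditingInterceptor())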
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = intercept.InterceptEndpointGroupAssociation() + pb_resp = intercept.InterceptEndpointGroupAssociation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_intercept_endpoint_group_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_intercept_endpoint_group_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + intercept.InterceptEndpointGroupAssociation.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.get_intercept_endpoint_group_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetInterceptEndpointGroupAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInterceptDeploymentGroups( + _BaseInterceptRestTransport._BaseListInterceptDeploymentGroups, + InterceptRestStub, + ): + def __hash__(self): + return hash("InterceptRestTransport.ListInterceptDeploymentGroups") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: intercept.ListInterceptDeploymentGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.ListInterceptDeploymentGroupsResponse: + r"""Call the list intercept deployment + groups method over HTTP. + + Args: + request (~.intercept.ListInterceptDeploymentGroupsRequest): + The request object. Request message for + ListInterceptDeploymentGroups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.intercept.ListInterceptDeploymentGroupsResponse: + Response message for + ListInterceptDeploymentGroups. 
+ + """ + + http_options = ( + _BaseInterceptRestTransport._BaseListInterceptDeploymentGroups._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_intercept_deployment_groups( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseListInterceptDeploymentGroups._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseListInterceptDeploymentGroups._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.ListInterceptDeploymentGroups", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListInterceptDeploymentGroups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + InterceptRestTransport._ListInterceptDeploymentGroups._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = intercept.ListInterceptDeploymentGroupsResponse() + pb_resp = intercept.ListInterceptDeploymentGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_intercept_deployment_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_intercept_deployment_groups_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + intercept.ListInterceptDeploymentGroupsResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.list_intercept_deployment_groups", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListInterceptDeploymentGroups", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInterceptDeployments( + _BaseInterceptRestTransport._BaseListInterceptDeployments, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.ListInterceptDeployments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: intercept.ListInterceptDeploymentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.ListInterceptDeploymentsResponse: + r"""Call the list intercept + deployments method over HTTP. + + Args: + request (~.intercept.ListInterceptDeploymentsRequest): + The request object. Request message for + ListInterceptDeployments. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.intercept.ListInterceptDeploymentsResponse: + Response message for + ListInterceptDeployments. + + """ + + http_options = ( + _BaseInterceptRestTransport._BaseListInterceptDeployments._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_intercept_deployments( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseListInterceptDeployments._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseListInterceptDeployments._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.ListInterceptDeployments", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListInterceptDeployments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._ListInterceptDeployments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
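+ # NOTE (illustrative comment, not generator output): this stub returns a
+ # single ListInterceptDeploymentsResponse page; following next_page_token is
+ # handled above the transport, where the generated client wraps the RPC in a
+ # pager so simple iteration fetches further pages on demand. A minimal
+ # sketch, assuming an InterceptClient built from this package:
+ #
+ #   request = intercept.ListInterceptDeploymentsRequest(parent=parent)
+ #   for deployment in client.list_intercept_deployments(request=request):
+ #       print(deployment.name)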
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = intercept.ListInterceptDeploymentsResponse() + pb_resp = intercept.ListInterceptDeploymentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_intercept_deployments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_intercept_deployments_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + intercept.ListInterceptDeploymentsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.list_intercept_deployments", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListInterceptDeployments", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInterceptEndpointGroupAssociations( + _BaseInterceptRestTransport._BaseListInterceptEndpointGroupAssociations, + InterceptRestStub, + ): + def __hash__(self): + return hash("InterceptRestTransport.ListInterceptEndpointGroupAssociations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: intercept.ListInterceptEndpointGroupAssociationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.ListInterceptEndpointGroupAssociationsResponse: + r"""Call the list intercept endpoint + group associations method over HTTP. + + Args: + request (~.intercept.ListInterceptEndpointGroupAssociationsRequest): + The request object. Request message for + ListInterceptEndpointGroupAssociations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.intercept.ListInterceptEndpointGroupAssociationsResponse: + Response message for + ListInterceptEndpointGroupAssociations. 
+ + """ + + http_options = ( + _BaseInterceptRestTransport._BaseListInterceptEndpointGroupAssociations._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_list_intercept_endpoint_group_associations( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseListInterceptEndpointGroupAssociations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseListInterceptEndpointGroupAssociations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.ListInterceptEndpointGroupAssociations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListInterceptEndpointGroupAssociations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._ListInterceptEndpointGroupAssociations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = intercept.ListInterceptEndpointGroupAssociationsResponse() + pb_resp = intercept.ListInterceptEndpointGroupAssociationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_intercept_endpoint_group_associations( + resp + ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_intercept_endpoint_group_associations_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = intercept.ListInterceptEndpointGroupAssociationsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.list_intercept_endpoint_group_associations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListInterceptEndpointGroupAssociations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInterceptEndpointGroups( + _BaseInterceptRestTransport._BaseListInterceptEndpointGroups, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.ListInterceptEndpointGroups") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = 
"application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: intercept.ListInterceptEndpointGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> intercept.ListInterceptEndpointGroupsResponse: + r"""Call the list intercept endpoint + groups method over HTTP. + + Args: + request (~.intercept.ListInterceptEndpointGroupsRequest): + The request object. Request message for + ListInterceptEndpointGroups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.intercept.ListInterceptEndpointGroupsResponse: + Response message for + ListInterceptEndpointGroups. + + """ + + http_options = ( + _BaseInterceptRestTransport._BaseListInterceptEndpointGroups._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_intercept_endpoint_groups( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseListInterceptEndpointGroups._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseListInterceptEndpointGroups._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.ListInterceptEndpointGroups", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListInterceptEndpointGroups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + InterceptRestTransport._ListInterceptEndpointGroups._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = intercept.ListInterceptEndpointGroupsResponse() + pb_resp = intercept.ListInterceptEndpointGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_intercept_endpoint_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_intercept_endpoint_groups_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + intercept.ListInterceptEndpointGroupsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.list_intercept_endpoint_groups", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListInterceptEndpointGroups", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInterceptDeployment( + _BaseInterceptRestTransport._BaseUpdateInterceptDeployment, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.UpdateInterceptDeployment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: intercept.UpdateInterceptDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update intercept + deployment method over HTTP. + + Args: + request (~.intercept.UpdateInterceptDeploymentRequest): + The request object. Request message for + UpdateInterceptDeployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseInterceptRestTransport._BaseUpdateInterceptDeployment._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_intercept_deployment( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseUpdateInterceptDeployment._get_transcoded_request( + http_options, request + ) + + body = _BaseInterceptRestTransport._BaseUpdateInterceptDeployment._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseUpdateInterceptDeployment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.UpdateInterceptDeployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "UpdateInterceptDeployment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._UpdateInterceptDeployment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_intercept_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_intercept_deployment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.update_intercept_deployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "UpdateInterceptDeployment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInterceptDeploymentGroup( + _BaseInterceptRestTransport._BaseUpdateInterceptDeploymentGroup, + InterceptRestStub, + ): + def __hash__(self): + return hash("InterceptRestTransport.UpdateInterceptDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), 
+ data=body, + ) + return response + + def __call__( + self, + request: intercept.UpdateInterceptDeploymentGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update intercept + deployment group method over HTTP. + + Args: + request (~.intercept.UpdateInterceptDeploymentGroupRequest): + The request object. Request message for + UpdateInterceptDeploymentGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseInterceptRestTransport._BaseUpdateInterceptDeploymentGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_intercept_deployment_group( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseUpdateInterceptDeploymentGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseInterceptRestTransport._BaseUpdateInterceptDeploymentGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseUpdateInterceptDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.UpdateInterceptDeploymentGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "UpdateInterceptDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + InterceptRestTransport._UpdateInterceptDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
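+ # Editorial note: this Update RPC returns a raw ``operations_pb2.Operation``
+ # from the transport; the higher-level generated client typically wraps it
+ # in a long-running-operation future that polls until the deployment group
+ # update completes.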
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_intercept_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_intercept_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.update_intercept_deployment_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "UpdateInterceptDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInterceptEndpointGroup( + _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroup, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.UpdateInterceptEndpointGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: intercept.UpdateInterceptEndpointGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update intercept endpoint + group method over HTTP. + + Args: + request (~.intercept.UpdateInterceptEndpointGroupRequest): + The request object. Request message for + UpdateInterceptEndpointGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_intercept_endpoint_group( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.UpdateInterceptEndpointGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "UpdateInterceptEndpointGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + InterceptRestTransport._UpdateInterceptEndpointGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_intercept_endpoint_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_intercept_endpoint_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.update_intercept_endpoint_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "UpdateInterceptEndpointGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInterceptEndpointGroupAssociation( + _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroupAssociation, + InterceptRestStub, + ): + def __hash__(self): + return hash( + "InterceptRestTransport.UpdateInterceptEndpointGroupAssociation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + 
timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: intercept.UpdateInterceptEndpointGroupAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update intercept endpoint + group association method over HTTP. + + Args: + request (~.intercept.UpdateInterceptEndpointGroupAssociationRequest): + The request object. Request message for + UpdateInterceptEndpointGroupAssociation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroupAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_update_intercept_endpoint_group_association( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroupAssociation._get_transcoded_request( + http_options, request + ) + + body = _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroupAssociation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroupAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.UpdateInterceptEndpointGroupAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "UpdateInterceptEndpointGroupAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._UpdateInterceptEndpointGroupAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
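+ # Editorial note: after parsing, the result is routed through the
+ # ``post_update_intercept_endpoint_group_association`` and
+ # ``..._with_metadata`` interceptor hooks below, which let a user-provided
+ # interceptor observe or rewrite the parsed response and its headers before
+ # they are returned to the caller.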
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_intercept_endpoint_group_association( + resp + ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_intercept_endpoint_group_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptClient.update_intercept_endpoint_group_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "UpdateInterceptEndpointGroupAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_intercept_deployment( + self, + ) -> Callable[ + [intercept.CreateInterceptDeploymentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInterceptDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.CreateInterceptDeploymentGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInterceptDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.CreateInterceptEndpointGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInterceptEndpointGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.CreateInterceptEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInterceptEndpointGroupAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_intercept_deployment( + self, + ) -> Callable[ + [intercept.DeleteInterceptDeploymentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInterceptDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.DeleteInterceptDeploymentGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteInterceptDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.DeleteInterceptEndpointGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInterceptEndpointGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.DeleteInterceptEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInterceptEndpointGroupAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_intercept_deployment( + self, + ) -> Callable[ + [intercept.GetInterceptDeploymentRequest], intercept.InterceptDeployment + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInterceptDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.GetInterceptDeploymentGroupRequest], + intercept.InterceptDeploymentGroup, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInterceptDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.GetInterceptEndpointGroupRequest], intercept.InterceptEndpointGroup + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInterceptEndpointGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.GetInterceptEndpointGroupAssociationRequest], + intercept.InterceptEndpointGroupAssociation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInterceptEndpointGroupAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_intercept_deployment_groups( + self, + ) -> Callable[ + [intercept.ListInterceptDeploymentGroupsRequest], + intercept.ListInterceptDeploymentGroupsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInterceptDeploymentGroups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_intercept_deployments( + self, + ) -> Callable[ + [intercept.ListInterceptDeploymentsRequest], + intercept.ListInterceptDeploymentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListInterceptDeployments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_intercept_endpoint_group_associations( + self, + ) -> Callable[ + [intercept.ListInterceptEndpointGroupAssociationsRequest], + intercept.ListInterceptEndpointGroupAssociationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInterceptEndpointGroupAssociations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_intercept_endpoint_groups( + self, + ) -> Callable[ + [intercept.ListInterceptEndpointGroupsRequest], + intercept.ListInterceptEndpointGroupsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInterceptEndpointGroups(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_intercept_deployment( + self, + ) -> Callable[ + [intercept.UpdateInterceptDeploymentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInterceptDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_intercept_deployment_group( + self, + ) -> Callable[ + [intercept.UpdateInterceptDeploymentGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInterceptDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_intercept_endpoint_group( + self, + ) -> Callable[ + [intercept.UpdateInterceptEndpointGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInterceptEndpointGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_intercept_endpoint_group_association( + self, + ) -> Callable[ + [intercept.UpdateInterceptEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateInterceptEndpointGroupAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseInterceptRestTransport._BaseGetLocation, InterceptRestStub): + def __hash__(self): + return hash("InterceptRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = ( + _BaseInterceptRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = ( + _BaseInterceptRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseInterceptRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
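+ # Editorial note: these Location/IAM/Operations mixin handlers decode the
+ # body to text and parse it directly into the well-known protos
+ # (``locations_pb2``, ``policy_pb2``, ``operations_pb2``) without
+ # ``ignore_unknown_fields``, unlike the Intercept RPCs above, which parse
+ # through the generated message wrappers (``<Message>.pb(resp)``).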
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseInterceptRestTransport._BaseListLocations, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = ( + _BaseInterceptRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = ( + _BaseInterceptRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseInterceptRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseInterceptRestTransport._BaseGetIamPolicy, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. 
+ + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = ( + _BaseInterceptRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = ( + _BaseInterceptRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseInterceptRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
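+ # Editorial note: the DEBUG logging in these handlers is gated on
+ # ``CLIENT_LOGGING_SUPPORTED`` and the logger's effective level; payload
+ # serialization is best-effort (falling back to ``None`` on failure), and
+ # the structured fields (``serviceName``, ``rpcName``, ``httpRequest`` /
+ # ``httpResponse``) are attached via the ``extra`` mapping.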
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseInterceptRestTransport._BaseSetIamPolicy, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options = ( + _BaseInterceptRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = ( + _BaseInterceptRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseInterceptRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseInterceptRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseInterceptRestTransport._BaseTestIamPermissions, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = ( + _BaseInterceptRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseInterceptRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInterceptRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
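+ # Editorial note, illustrative sketch only (not part of the generated
+ # surface): callers normally reach this handler through the public client
+ # rather than the transport, along these lines; the client constructor
+ # argument, resource path, and permission string are assumptions:
+ #
+ #     from google.iam.v1 import iam_policy_pb2
+ #
+ #     client = InterceptClient(transport="rest")
+ #     response = client.test_iam_permissions(
+ #         iam_policy_pb2.TestIamPermissionsRequest(
+ #             resource="projects/my-project/locations/us-central1/interceptDeployments/my-deployment",
+ #             permissions=["networksecurity.interceptDeployments.get"],
+ #         )
+ #     )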
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseInterceptRestTransport._BaseCancelOperation, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseInterceptRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = ( + _BaseInterceptRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseInterceptRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.CancelOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseInterceptRestTransport._BaseDeleteOperation, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseInterceptRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseInterceptRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseInterceptRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseInterceptRestTransport._BaseGetOperation, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = ( + _BaseInterceptRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = ( + _BaseInterceptRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseInterceptRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseInterceptRestTransport._BaseListOperations, InterceptRestStub + ): + def __hash__(self): + return hash("InterceptRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. 
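+
+            This is the standard ``google.longrunning.Operations.ListOperations``
+            mixin method, exposed here over REST.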
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BaseInterceptRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = ( + _BaseInterceptRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseInterceptRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.InterceptClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InterceptRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.InterceptAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Intercept", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("InterceptRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/rest_base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/rest_base.py new file mode 100644 index 000000000000..46aebf2bb875 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/intercept/transports/rest_base.py @@ -0,0 +1,1447 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.network_security_v1alpha1.types import intercept + +from .base import DEFAULT_CLIENT_INFO, InterceptTransport + + +class _BaseInterceptRestTransport(InterceptTransport): + """Base REST backend transport for Intercept. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'networksecurity.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+    class _BaseCreateInterceptDeployment:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "interceptDeploymentId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1alpha1/{parent=projects/*/locations/*}/interceptDeployments",
+                    "body": "intercept_deployment",
+                },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = intercept.CreateInterceptDeploymentRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"], use_integers_for_enums=True
+            )
+            return body
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(
+                _BaseInterceptRestTransport._BaseCreateInterceptDeployment._get_unset_required_fields(
+                    query_params
+                )
+            )
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseCreateInterceptDeploymentGroup:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "interceptDeploymentGroupId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1alpha1/{parent=projects/*/locations/*}/interceptDeploymentGroups",
+                    "body": 
"intercept_deployment_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.CreateInterceptDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseCreateInterceptDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateInterceptEndpointGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "interceptEndpointGroupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/interceptEndpointGroups", + "body": "intercept_endpoint_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.CreateInterceptEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateInterceptEndpointGroupAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/interceptEndpointGroupAssociations", + "body": "intercept_endpoint_group_association", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.CreateInterceptEndpointGroupAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = 
json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseCreateInterceptEndpointGroupAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInterceptDeployment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/interceptDeployments/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.DeleteInterceptDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseDeleteInterceptDeployment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInterceptDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/interceptDeploymentGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.DeleteInterceptDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseDeleteInterceptDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInterceptEndpointGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + 
"uri": "/v1alpha1/{name=projects/*/locations/*/interceptEndpointGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.DeleteInterceptEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseDeleteInterceptEndpointGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInterceptEndpointGroupAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/interceptEndpointGroupAssociations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.DeleteInterceptEndpointGroupAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseDeleteInterceptEndpointGroupAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInterceptDeployment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/interceptDeployments/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.GetInterceptDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseGetInterceptDeployment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInterceptDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/interceptDeploymentGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.GetInterceptDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseGetInterceptDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInterceptEndpointGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/interceptEndpointGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.GetInterceptEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseGetInterceptEndpointGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInterceptEndpointGroupAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/interceptEndpointGroupAssociations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.GetInterceptEndpointGroupAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseGetInterceptEndpointGroupAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseListInterceptDeploymentGroups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/interceptDeploymentGroups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.ListInterceptDeploymentGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseListInterceptDeploymentGroups._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInterceptDeployments: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/interceptDeployments", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.ListInterceptDeploymentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseListInterceptDeployments._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInterceptEndpointGroupAssociations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/interceptEndpointGroupAssociations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.ListInterceptEndpointGroupAssociationsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseListInterceptEndpointGroupAssociations._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInterceptEndpointGroups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/interceptEndpointGroups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.ListInterceptEndpointGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseListInterceptEndpointGroups._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInterceptDeployment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{intercept_deployment.name=projects/*/locations/*/interceptDeployments/*}", + "body": "intercept_deployment", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.UpdateInterceptDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseUpdateInterceptDeployment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInterceptDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: 
List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{intercept_deployment_group.name=projects/*/locations/*/interceptDeploymentGroups/*}", + "body": "intercept_deployment_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.UpdateInterceptDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseUpdateInterceptDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInterceptEndpointGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{intercept_endpoint_group.name=projects/*/locations/*/interceptEndpointGroups/*}", + "body": "intercept_endpoint_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.UpdateInterceptEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInterceptEndpointGroupAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{intercept_endpoint_group_association.name=projects/*/locations/*/interceptEndpointGroupAssociations/*}", + "body": "intercept_endpoint_group_association", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = intercept.UpdateInterceptEndpointGroupAssociationRequest.pb( + request + ) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseInterceptRestTransport._BaseUpdateInterceptEndpointGroupAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def 
_get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = 
json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseInterceptRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/__init__.py new file mode 100644 index 000000000000..3816db9fe6f9 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import MirroringAsyncClient +from .client import MirroringClient + +__all__ = ( + "MirroringClient", + "MirroringAsyncClient", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/async_client.py new file mode 100644 index 000000000000..6a0c3b5b6935 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/async_client.py @@ -0,0 +1,3792 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.mirroring import pagers +from google.cloud.network_security_v1alpha1.types import common, mirroring + +from .client import MirroringClient +from .transports.base import DEFAULT_CLIENT_INFO, MirroringTransport +from .transports.grpc_asyncio import MirroringGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = 
False + +_LOGGER = std_logging.getLogger(__name__) + + +class MirroringAsyncClient: + """PM2 is the "out-of-band" flavor of the Network Security + Integrations product. + """ + + _client: MirroringClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = MirroringClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MirroringClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = MirroringClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = MirroringClient._DEFAULT_UNIVERSE + + forwarding_rule_path = staticmethod(MirroringClient.forwarding_rule_path) + parse_forwarding_rule_path = staticmethod( + MirroringClient.parse_forwarding_rule_path + ) + mirroring_deployment_path = staticmethod(MirroringClient.mirroring_deployment_path) + parse_mirroring_deployment_path = staticmethod( + MirroringClient.parse_mirroring_deployment_path + ) + mirroring_deployment_group_path = staticmethod( + MirroringClient.mirroring_deployment_group_path + ) + parse_mirroring_deployment_group_path = staticmethod( + MirroringClient.parse_mirroring_deployment_group_path + ) + mirroring_endpoint_group_path = staticmethod( + MirroringClient.mirroring_endpoint_group_path + ) + parse_mirroring_endpoint_group_path = staticmethod( + MirroringClient.parse_mirroring_endpoint_group_path + ) + mirroring_endpoint_group_association_path = staticmethod( + MirroringClient.mirroring_endpoint_group_association_path + ) + parse_mirroring_endpoint_group_association_path = staticmethod( + MirroringClient.parse_mirroring_endpoint_group_association_path + ) + network_path = staticmethod(MirroringClient.network_path) + parse_network_path = staticmethod(MirroringClient.parse_network_path) + common_billing_account_path = staticmethod( + MirroringClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MirroringClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(MirroringClient.common_folder_path) + parse_common_folder_path = staticmethod(MirroringClient.parse_common_folder_path) + common_organization_path = staticmethod(MirroringClient.common_organization_path) + parse_common_organization_path = staticmethod( + MirroringClient.parse_common_organization_path + ) + common_project_path = staticmethod(MirroringClient.common_project_path) + parse_common_project_path = staticmethod(MirroringClient.parse_common_project_path) + common_location_path = staticmethod(MirroringClient.common_location_path) + parse_common_location_path = staticmethod( + MirroringClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MirroringAsyncClient: The constructed client. + """ + return MirroringClient.from_service_account_info.__func__(MirroringAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + MirroringAsyncClient: The constructed client. + """ + return MirroringClient.from_service_account_file.__func__(MirroringAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MirroringClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MirroringTransport: + """Returns the transport used by the client instance. + + Returns: + MirroringTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = MirroringClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, MirroringTransport, Callable[..., MirroringTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the mirroring async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MirroringTransport,Callable[..., MirroringTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MirroringTransport constructor. 
+ If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MirroringClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.MirroringAsyncClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "credentialsType": None, + }, + ) + + async def list_mirroring_endpoint_groups( + self, + request: Optional[ + Union[mirroring.ListMirroringEndpointGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMirroringEndpointGroupsAsyncPager: + r"""Lists endpoint groups in a given project and + location. See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_mirroring_endpoint_groups(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringEndpointGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_endpoint_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsRequest, dict]]): + The request object. Request message for + ListMirroringEndpointGroups. + parent (:class:`str`): + Required. The parent, which owns this collection of + endpoint groups. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringEndpointGroupsAsyncPager: + Response message for + ListMirroringEndpointGroups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.ListMirroringEndpointGroupsRequest): + request = mirroring.ListMirroringEndpointGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_mirroring_endpoint_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
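+        # Illustrative note, not part of the generated sample: the async pager
+        # constructed below keeps the wrapped RPC and the original request, and
+        # re-issues the call with each response's `next_page_token` while the
+        # caller iterates. A minimal usage sketch, assuming `client` and
+        # `parent` already exist:
+        #
+        #     pager = await client.list_mirroring_endpoint_groups(parent=parent)
+        #     async for group in pager:   # items across all pages
+        #         print(group.name)
+        #     # or walk whole pages via `pager.pages`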
+ response = pagers.ListMirroringEndpointGroupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_mirroring_endpoint_group( + self, + request: Optional[ + Union[mirroring.GetMirroringEndpointGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringEndpointGroup: + r"""Gets a specific endpoint group. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringEndpointGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mirroring_endpoint_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetMirroringEndpointGroupRequest, dict]]): + The request object. Request message for + GetMirroringEndpointGroup. + name (:class:`str`): + Required. The name of the endpoint group to retrieve. + Format: + projects/{project}/locations/{location}/mirroringEndpointGroups/{mirroring_endpoint_group} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup: + An endpoint group is a consumer + frontend for a deployment group + (backend). In order to configure + mirroring for a network, consumers must + create: + + - An association between their network + and the endpoint group. + - A security profile that points to the + endpoint group. + - A mirroring rule that references the + security profile (group). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.GetMirroringEndpointGroupRequest): + request = mirroring.GetMirroringEndpointGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_mirroring_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_mirroring_endpoint_group( + self, + request: Optional[ + Union[mirroring.CreateMirroringEndpointGroupRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + mirroring_endpoint_group: Optional[mirroring.MirroringEndpointGroup] = None, + mirroring_endpoint_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an endpoint group in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateMirroringEndpointGroupRequest( + parent="parent_value", + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + # Make the request + operation = client.create_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateMirroringEndpointGroupRequest, dict]]): + The request object. Request message for + CreateMirroringEndpointGroup. + parent (:class:`str`): + Required. The parent resource where + this endpoint group will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_endpoint_group (:class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup`): + Required. The endpoint group to + create. + + This corresponds to the ``mirroring_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ mirroring_endpoint_group_id (:class:`str`): + Required. The ID to use for the + endpoint group, which will become the + final component of the endpoint group's + resource name. + + This corresponds to the ``mirroring_endpoint_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup` An endpoint group is a consumer frontend for a deployment group (backend). + In order to configure mirroring for a network, + consumers must create: - An association between their + network and the endpoint group. - A security profile + that points to the endpoint group. - A mirroring rule + that references the security profile (group). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + mirroring_endpoint_group, + mirroring_endpoint_group_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.CreateMirroringEndpointGroupRequest): + request = mirroring.CreateMirroringEndpointGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if mirroring_endpoint_group is not None: + request.mirroring_endpoint_group = mirroring_endpoint_group + if mirroring_endpoint_group_id is not None: + request.mirroring_endpoint_group_id = mirroring_endpoint_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_mirroring_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + mirroring.MirroringEndpointGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
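+        # (The AsyncOperation wrapped above resolves to a MirroringEndpointGroup
+        # once the long-running create finishes; callers typically follow the
+        # docstring sample, e.g. `response = (await operation).result()`.)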
+ return response + + async def update_mirroring_endpoint_group( + self, + request: Optional[ + Union[mirroring.UpdateMirroringEndpointGroupRequest, dict] + ] = None, + *, + mirroring_endpoint_group: Optional[mirroring.MirroringEndpointGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an endpoint group. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateMirroringEndpointGroupRequest( + ) + + # Make the request + operation = client.update_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateMirroringEndpointGroupRequest, dict]]): + The request object. Request message for + UpdateMirroringEndpointGroup. + mirroring_endpoint_group (:class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup`): + Required. The endpoint group to + update. + + This corresponds to the ``mirroring_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to update. Fields are + specified relative to the endpoint group (e.g. + ``description``; *not* + ``mirroring_endpoint_group.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup` An endpoint group is a consumer frontend for a deployment group (backend). + In order to configure mirroring for a network, + consumers must create: - An association between their + network and the endpoint group. - A security profile + that points to the endpoint group. - A mirroring rule + that references the security profile (group). + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [mirroring_endpoint_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.UpdateMirroringEndpointGroupRequest): + request = mirroring.UpdateMirroringEndpointGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if mirroring_endpoint_group is not None: + request.mirroring_endpoint_group = mirroring_endpoint_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_mirroring_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "mirroring_endpoint_group.name", + request.mirroring_endpoint_group.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + mirroring.MirroringEndpointGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_mirroring_endpoint_group( + self, + request: Optional[ + Union[mirroring.DeleteMirroringEndpointGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an endpoint group. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringEndpointGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteMirroringEndpointGroupRequest, dict]]): + The request object. 
Request message for + DeleteMirroringEndpointGroup. + name (:class:`str`): + Required. The endpoint group to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.DeleteMirroringEndpointGroupRequest): + request = mirroring.DeleteMirroringEndpointGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_mirroring_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_mirroring_endpoint_group_associations( + self, + request: Optional[ + Union[mirroring.ListMirroringEndpointGroupAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMirroringEndpointGroupAssociationsAsyncPager: + r"""Lists associations in a given project and location. + See https://google.aip.dev/132. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_mirroring_endpoint_group_associations(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringEndpointGroupAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_endpoint_group_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsRequest, dict]]): + The request object. Request message for + ListMirroringEndpointGroupAssociations. + parent (:class:`str`): + Required. The parent, which owns this collection of + associations. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringEndpointGroupAssociationsAsyncPager: + Response message for + ListMirroringEndpointGroupAssociations. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, mirroring.ListMirroringEndpointGroupAssociationsRequest + ): + request = mirroring.ListMirroringEndpointGroupAssociationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_mirroring_endpoint_group_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
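+        # For context: `gapic_v1.routing_header.to_grpc_metadata` renders the
+        # `x-goog-request-params` header (a URL-encoded `parent=...` entry here),
+        # which the service uses to route the request; caller-supplied metadata
+        # is preserved and the routing entry is appended to it.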
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMirroringEndpointGroupAssociationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_mirroring_endpoint_group_association( + self, + request: Optional[ + Union[mirroring.GetMirroringEndpointGroupAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringEndpointGroupAssociation: + r"""Gets a specific association. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mirroring_endpoint_group_association(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetMirroringEndpointGroupAssociationRequest, dict]]): + The request object. Request message for + GetMirroringEndpointGroupAssociation. + name (:class:`str`): + Required. The name of the association to retrieve. + Format: + projects/{project}/locations/{location}/mirroringEndpointGroupAssociations/{mirroring_endpoint_group_association} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation: + An endpoint group association + represents a link between a network and + an endpoint group in the organization. + + Creating an association creates the + networking infrastructure linking the + network to the endpoint group, but does + not enable mirroring by itself. To + enable mirroring, the user must also + create a network firewall policy + containing mirroring rules and associate + it with the network. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, mirroring.GetMirroringEndpointGroupAssociationRequest + ): + request = mirroring.GetMirroringEndpointGroupAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_mirroring_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_mirroring_endpoint_group_association( + self, + request: Optional[ + Union[mirroring.CreateMirroringEndpointGroupAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + mirroring_endpoint_group_association: Optional[ + mirroring.MirroringEndpointGroupAssociation + ] = None, + mirroring_endpoint_group_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an association in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateMirroringEndpointGroupAssociationRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateMirroringEndpointGroupAssociationRequest, dict]]): + The request object. Request message for + CreateMirroringEndpointGroupAssociation. 
+ parent (:class:`str`): + Required. The parent resource where + this association will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_endpoint_group_association (:class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation`): + Required. The association to create. + This corresponds to the ``mirroring_endpoint_group_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_endpoint_group_association_id (:class:`str`): + Optional. The ID to use for the new + association, which will become the final + component of the endpoint group's + resource name. If not provided, the + server will generate a unique ID. + + This corresponds to the ``mirroring_endpoint_group_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation` An endpoint group association represents a link between a network and an + endpoint group in the organization. + + Creating an association creates the networking + infrastructure linking the network to the endpoint + group, but does not enable mirroring by itself. To + enable mirroring, the user must also create a network + firewall policy containing mirroring rules and + associate it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + mirroring_endpoint_group_association, + mirroring_endpoint_group_association_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, mirroring.CreateMirroringEndpointGroupAssociationRequest + ): + request = mirroring.CreateMirroringEndpointGroupAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if mirroring_endpoint_group_association is not None: + request.mirroring_endpoint_group_association = ( + mirroring_endpoint_group_association + ) + if mirroring_endpoint_group_association_id is not None: + request.mirroring_endpoint_group_association_id = ( + mirroring_endpoint_group_association_id + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
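+        # For context: `_wrapped_methods` is built when the transport is created;
+        # each entry wraps the raw RPC with the service's default retry/timeout
+        # policy, and the explicit `retry=`/`timeout=` arguments passed below
+        # override those defaults for this call only.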
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_mirroring_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + mirroring.MirroringEndpointGroupAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_mirroring_endpoint_group_association( + self, + request: Optional[ + Union[mirroring.UpdateMirroringEndpointGroupAssociationRequest, dict] + ] = None, + *, + mirroring_endpoint_group_association: Optional[ + mirroring.MirroringEndpointGroupAssociation + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an association. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateMirroringEndpointGroupAssociationRequest( + ) + + # Make the request + operation = client.update_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateMirroringEndpointGroupAssociationRequest, dict]]): + The request object. Request message for + UpdateMirroringEndpointGroupAssociation. + mirroring_endpoint_group_association (:class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation`): + Required. The association to update. + This corresponds to the ``mirroring_endpoint_group_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to update. Fields are + specified relative to the association (e.g. + ``description``; *not* + ``mirroring_endpoint_group_association.description``). + See https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation` An endpoint group association represents a link between a network and an + endpoint group in the organization. + + Creating an association creates the networking + infrastructure linking the network to the endpoint + group, but does not enable mirroring by itself. To + enable mirroring, the user must also create a network + firewall policy containing mirroring rules and + associate it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [mirroring_endpoint_group_association, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, mirroring.UpdateMirroringEndpointGroupAssociationRequest + ): + request = mirroring.UpdateMirroringEndpointGroupAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if mirroring_endpoint_group_association is not None: + request.mirroring_endpoint_group_association = ( + mirroring_endpoint_group_association + ) + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_mirroring_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "mirroring_endpoint_group_association.name", + request.mirroring_endpoint_group_association.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + mirroring.MirroringEndpointGroupAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_mirroring_endpoint_group_association( + self, + request: Optional[ + Union[mirroring.DeleteMirroringEndpointGroupAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an association. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteMirroringEndpointGroupAssociationRequest, dict]]): + The request object. Request message for + DeleteMirroringEndpointGroupAssociation. + name (:class:`str`): + Required. The association to delete. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
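+        # For context: proto-plus request constructors accept either an existing
+        # message or a plain dict, so a caller may pass a dict instead of a
+        # request object, e.g. (resource name is a placeholder):
+        #
+        #     await client.delete_mirroring_endpoint_group_association(
+        #         request={"name": "projects/PROJECT/locations/LOCATION"
+        #                          "/mirroringEndpointGroupAssociations/ASSOCIATION"}
+        #     )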
+ if not isinstance( + request, mirroring.DeleteMirroringEndpointGroupAssociationRequest + ): + request = mirroring.DeleteMirroringEndpointGroupAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_mirroring_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_mirroring_deployment_groups( + self, + request: Optional[ + Union[mirroring.ListMirroringDeploymentGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMirroringDeploymentGroupsAsyncPager: + r"""Lists deployment groups in a given project and + location. See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_mirroring_deployment_groups(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_deployment_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsRequest, dict]]): + The request object. Request message for + ListMirroringDeploymentGroups. + parent (:class:`str`): + Required. The parent, which owns this collection of + deployment groups. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringDeploymentGroupsAsyncPager: + Response message for + ListMirroringDeploymentGroups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.ListMirroringDeploymentGroupsRequest): + request = mirroring.ListMirroringDeploymentGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_mirroring_deployment_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMirroringDeploymentGroupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_mirroring_deployment_group( + self, + request: Optional[ + Union[mirroring.GetMirroringDeploymentGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringDeploymentGroup: + r"""Gets a specific deployment group. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mirroring_deployment_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetMirroringDeploymentGroupRequest, dict]]): + The request object. Request message for + GetMirroringDeploymentGroup. + name (:class:`str`): + Required. The name of the deployment group to retrieve. + Format: + projects/{project}/locations/{location}/mirroringDeploymentGroups/{mirroring_deployment_group} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup: + A deployment group aggregates many + zonal mirroring backends (deployments) + into a single global mirroring service. + Consumers can connect this service using + an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.GetMirroringDeploymentGroupRequest): + request = mirroring.GetMirroringDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_mirroring_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
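+ # --- Illustrative note (not generated code): the ``retry`` and ``timeout``
+ # parameters documented above can be overridden per call. A sketch using the
+ # resource-name format from the docstring, with placeholder project/group IDs:
+ #
+ #     from google.api_core import retry_async
+ #
+ #     group = await client.get_mirroring_deployment_group(
+ #         name="projects/my-project/locations/global/mirroringDeploymentGroups/my-group",
+ #         retry=retry_async.AsyncRetry(initial=0.5, maximum=10.0, multiplier=2.0, timeout=60.0),
+ #         timeout=120.0,
+ #     )
+ #
+ # "my-project" and "my-group" are placeholders, not values defined by this API.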
+ return response + + async def create_mirroring_deployment_group( + self, + request: Optional[ + Union[mirroring.CreateMirroringDeploymentGroupRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + mirroring_deployment_group: Optional[mirroring.MirroringDeploymentGroup] = None, + mirroring_deployment_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a deployment group in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + mirroring_deployment_group = network_security_v1alpha1.MirroringDeploymentGroup() + mirroring_deployment_group.network = "network_value" + + request = network_security_v1alpha1.CreateMirroringDeploymentGroupRequest( + parent="parent_value", + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + mirroring_deployment_group=mirroring_deployment_group, + ) + + # Make the request + operation = client.create_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateMirroringDeploymentGroupRequest, dict]]): + The request object. Request message for + CreateMirroringDeploymentGroup. + parent (:class:`str`): + Required. The parent resource where + this deployment group will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_deployment_group (:class:`google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup`): + Required. The deployment group to + create. + + This corresponds to the ``mirroring_deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_deployment_group_id (:class:`str`): + Required. The ID to use for the new + deployment group, which will become the + final component of the deployment + group's resource name. + + This corresponds to the ``mirroring_deployment_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup` A deployment group aggregates many zonal mirroring backends (deployments) + into a single global mirroring service. Consumers can + connect this service using an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + mirroring_deployment_group, + mirroring_deployment_group_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.CreateMirroringDeploymentGroupRequest): + request = mirroring.CreateMirroringDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if mirroring_deployment_group is not None: + request.mirroring_deployment_group = mirroring_deployment_group + if mirroring_deployment_group_id is not None: + request.mirroring_deployment_group_id = mirroring_deployment_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_mirroring_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + mirroring.MirroringDeploymentGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_mirroring_deployment_group( + self, + request: Optional[ + Union[mirroring.UpdateMirroringDeploymentGroupRequest, dict] + ] = None, + *, + mirroring_deployment_group: Optional[mirroring.MirroringDeploymentGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a deployment group. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + mirroring_deployment_group = network_security_v1alpha1.MirroringDeploymentGroup() + mirroring_deployment_group.network = "network_value" + + request = network_security_v1alpha1.UpdateMirroringDeploymentGroupRequest( + mirroring_deployment_group=mirroring_deployment_group, + ) + + # Make the request + operation = client.update_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateMirroringDeploymentGroupRequest, dict]]): + The request object. Request message for + UpdateMirroringDeploymentGroup. + mirroring_deployment_group (:class:`google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup`): + Required. The deployment group to + update. + + This corresponds to the ``mirroring_deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to update. Fields are + specified relative to the deployment group (e.g. + ``description``; *not* + ``mirroring_deployment_group.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup` A deployment group aggregates many zonal mirroring backends (deployments) + into a single global mirroring service. Consumers can + connect this service using an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [mirroring_deployment_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, mirroring.UpdateMirroringDeploymentGroupRequest): + request = mirroring.UpdateMirroringDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if mirroring_deployment_group is not None: + request.mirroring_deployment_group = mirroring_deployment_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_mirroring_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "mirroring_deployment_group.name", + request.mirroring_deployment_group.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + mirroring.MirroringDeploymentGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_mirroring_deployment_group( + self, + request: Optional[ + Union[mirroring.DeleteMirroringDeploymentGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a deployment group. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteMirroringDeploymentGroupRequest, dict]]): + The request object. Request message for + DeleteMirroringDeploymentGroup. + name (:class:`str`): + Required. The deployment group to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.DeleteMirroringDeploymentGroupRequest): + request = mirroring.DeleteMirroringDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_mirroring_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_mirroring_deployments( + self, + request: Optional[ + Union[mirroring.ListMirroringDeploymentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMirroringDeploymentsAsyncPager: + r"""Lists deployments in a given project and location. + See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_mirroring_deployments(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_deployments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsRequest, dict]]): + The request object. Request message for + ListMirroringDeployments. + parent (:class:`str`): + Required. The parent, which owns this collection of + deployments. Example: + ``projects/123456789/locations/us-central1-a``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringDeploymentsAsyncPager: + Response message for + ListMirroringDeployments. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.ListMirroringDeploymentsRequest): + request = mirroring.ListMirroringDeploymentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_mirroring_deployments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
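+ # --- Illustrative note (not generated code): besides the ``__aiter__``
+ # convenience used in the sample above, the pager constructed below also
+ # exposes a ``pages`` async iterator for consuming whole list responses.
+ # A sketch, reusing the sample's placeholder parent value:
+ #
+ #     pager = await client.list_mirroring_deployments(parent="parent_value")
+ #     async for deployment in pager:        # yields individual MirroringDeployment items
+ #         print(deployment.name)
+ #     # or, alternatively, page by page:
+ #     async for page in pager.pages:        # yields ListMirroringDeploymentsResponse pages
+ #         print(page)
+ #
+ # This is a usage sketch only; consult the generated pagers module for the
+ # exact response field names.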
+ response = pagers.ListMirroringDeploymentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_mirroring_deployment( + self, + request: Optional[Union[mirroring.GetMirroringDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringDeployment: + r"""Gets a specific deployment. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mirroring_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetMirroringDeploymentRequest, dict]]): + The request object. Request message for + GetMirroringDeployment. + name (:class:`str`): + Required. The name of the deployment to retrieve. + Format: + projects/{project}/locations/{location}/mirroringDeployments/{mirroring_deployment} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.MirroringDeployment: + A deployment represents a zonal + mirroring backend ready to accept + GENEVE-encapsulated replica traffic, + e.g. a zonal instance group fronted by + an internal passthrough load balancer. + Deployments are always part of a global + deployment group which represents a + global mirroring service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, mirroring.GetMirroringDeploymentRequest): + request = mirroring.GetMirroringDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_mirroring_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_mirroring_deployment( + self, + request: Optional[ + Union[mirroring.CreateMirroringDeploymentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + mirroring_deployment: Optional[mirroring.MirroringDeployment] = None, + mirroring_deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a deployment in a given project and location. + See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + mirroring_deployment = network_security_v1alpha1.MirroringDeployment() + mirroring_deployment.forwarding_rule = "forwarding_rule_value" + mirroring_deployment.mirroring_deployment_group = "mirroring_deployment_group_value" + + request = network_security_v1alpha1.CreateMirroringDeploymentRequest( + parent="parent_value", + mirroring_deployment_id="mirroring_deployment_id_value", + mirroring_deployment=mirroring_deployment, + ) + + # Make the request + operation = client.create_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateMirroringDeploymentRequest, dict]]): + The request object. Request message for + CreateMirroringDeployment. + parent (:class:`str`): + Required. The parent resource where + this deployment will be created. Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_deployment (:class:`google.cloud.network_security_v1alpha1.types.MirroringDeployment`): + Required. The deployment to create. 
+ This corresponds to the ``mirroring_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_deployment_id (:class:`str`): + Required. The ID to use for the new + deployment, which will become the final + component of the deployment's resource + name. + + This corresponds to the ``mirroring_deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringDeployment` A deployment represents a zonal mirroring backend ready to accept + GENEVE-encapsulated replica traffic, e.g. a zonal + instance group fronted by an internal passthrough + load balancer. Deployments are always part of a + global deployment group which represents a global + mirroring service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, mirroring_deployment, mirroring_deployment_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.CreateMirroringDeploymentRequest): + request = mirroring.CreateMirroringDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if mirroring_deployment is not None: + request.mirroring_deployment = mirroring_deployment + if mirroring_deployment_id is not None: + request.mirroring_deployment_id = mirroring_deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_mirroring_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + mirroring.MirroringDeployment, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
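+ # --- Illustrative note (not generated code): instead of building the request
+ # message as in the sample above, the flattened arguments may be passed
+ # directly (but never together with ``request``, per the check above). A
+ # sketch with placeholder values:
+ #
+ #     deployment = network_security_v1alpha1.MirroringDeployment(
+ #         forwarding_rule="forwarding_rule_value",
+ #         mirroring_deployment_group="mirroring_deployment_group_value",
+ #     )
+ #     operation = await client.create_mirroring_deployment(
+ #         parent="projects/my-project/locations/us-central1-a",
+ #         mirroring_deployment=deployment,
+ #         mirroring_deployment_id="my-deployment",
+ #     )
+ #     response = await operation.result()   # wait for the long-running operation to finish
+ #
+ # "my-project", "us-central1-a", and "my-deployment" are placeholders.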
+ return response + + async def update_mirroring_deployment( + self, + request: Optional[ + Union[mirroring.UpdateMirroringDeploymentRequest, dict] + ] = None, + *, + mirroring_deployment: Optional[mirroring.MirroringDeployment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a deployment. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + mirroring_deployment = network_security_v1alpha1.MirroringDeployment() + mirroring_deployment.forwarding_rule = "forwarding_rule_value" + mirroring_deployment.mirroring_deployment_group = "mirroring_deployment_group_value" + + request = network_security_v1alpha1.UpdateMirroringDeploymentRequest( + mirroring_deployment=mirroring_deployment, + ) + + # Make the request + operation = client.update_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateMirroringDeploymentRequest, dict]]): + The request object. Request message for + UpdateMirroringDeployment. + mirroring_deployment (:class:`google.cloud.network_security_v1alpha1.types.MirroringDeployment`): + Required. The deployment to update. + This corresponds to the ``mirroring_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to update. Fields are + specified relative to the deployment (e.g. + ``description``; *not* + ``mirroring_deployment.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringDeployment` A deployment represents a zonal mirroring backend ready to accept + GENEVE-encapsulated replica traffic, e.g. a zonal + instance group fronted by an internal passthrough + load balancer. 
Deployments are always part of a + global deployment group which represents a global + mirroring service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [mirroring_deployment, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.UpdateMirroringDeploymentRequest): + request = mirroring.UpdateMirroringDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if mirroring_deployment is not None: + request.mirroring_deployment = mirroring_deployment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_mirroring_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("mirroring_deployment.name", request.mirroring_deployment.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + mirroring.MirroringDeployment, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_mirroring_deployment( + self, + request: Optional[ + Union[mirroring.DeleteMirroringDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a deployment. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteMirroringDeploymentRequest, dict]]): + The request object. Request message for + DeleteMirroringDeployment. + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.DeleteMirroringDeploymentRequest): + request = mirroring.DeleteMirroringDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_mirroring_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
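+ # --- Illustrative note (not generated code): as documented above, extra
+ # request metadata values must be ``str`` unless the key ends in ``-bin``, in
+ # which case the value must be ``bytes``. A sketch with hypothetical metadata
+ # keys (not keys defined by this API):
+ #
+ #     operation = await client.delete_mirroring_deployment(
+ #         name="name_value",
+ #         metadata=[
+ #             ("x-example-label", "key=value"),          # str value for a normal key
+ #             ("x-example-trace-bin", b"\x01\x02\x03"),  # bytes value, because the key ends in "-bin"
+ #         ],
+ #     )
+ #
+ # The request itself is sent immediately below.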
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. 
+ + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "MirroringAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("MirroringAsyncClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/client.py new file mode 100644 index 000000000000..84074bac164d --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/client.py @@ -0,0 +1,4305 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.mirroring import pagers +from google.cloud.network_security_v1alpha1.types import common, mirroring + +from .transports.base import DEFAULT_CLIENT_INFO, MirroringTransport +from .transports.grpc import MirroringGrpcTransport +from .transports.grpc_asyncio import MirroringGrpcAsyncIOTransport +from .transports.rest import MirroringRestTransport + + +class MirroringClientMeta(type): + """Metaclass for the Mirroring client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[MirroringTransport]] + _transport_registry["grpc"] = MirroringGrpcTransport + _transport_registry["grpc_asyncio"] = MirroringGrpcAsyncIOTransport + _transport_registry["rest"] = MirroringRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MirroringTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MirroringClient(metaclass=MirroringClientMeta): + """PM2 is the "out-of-band" flavor of the Network Security + Integrations product. 
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "networksecurity.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "networksecurity.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MirroringClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MirroringClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> MirroringTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MirroringTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def forwarding_rule_path(
+        project: str,
+        forwarding_rule: str,
+    ) -> str:
+        """Returns a fully-qualified forwarding_rule string."""
+        return "projects/{project}/global/forwardingRules/{forwarding_rule}".format(
+            project=project,
+            forwarding_rule=forwarding_rule,
+        )
+
+    @staticmethod
+    def parse_forwarding_rule_path(path: str) -> Dict[str, str]:
+        """Parses a forwarding_rule path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/global/forwardingRules/(?P<forwarding_rule>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def mirroring_deployment_path(
+        project: str,
+        location: str,
+        mirroring_deployment: str,
+    ) -> str:
+        """Returns a fully-qualified mirroring_deployment string."""
+        return "projects/{project}/locations/{location}/mirroringDeployments/{mirroring_deployment}".format(
+            project=project,
+            location=location,
+            mirroring_deployment=mirroring_deployment,
+        )
+
+    @staticmethod
+    def parse_mirroring_deployment_path(path: str) -> Dict[str, str]:
+        """Parses a mirroring_deployment path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/mirroringDeployments/(?P<mirroring_deployment>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def mirroring_deployment_group_path(
+        project: str,
+        location: str,
+        mirroring_deployment_group: str,
+    ) -> str:
+        """Returns a fully-qualified mirroring_deployment_group string."""
+        return "projects/{project}/locations/{location}/mirroringDeploymentGroups/{mirroring_deployment_group}".format(
+            project=project,
+            location=location,
+            mirroring_deployment_group=mirroring_deployment_group,
+        )
+
+    @staticmethod
+    def parse_mirroring_deployment_group_path(path: str) -> Dict[str, str]:
+        """Parses a mirroring_deployment_group path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/mirroringDeploymentGroups/(?P<mirroring_deployment_group>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def mirroring_endpoint_group_path(
+        project: str,
+        location: str,
+        mirroring_endpoint_group: str,
+    ) -> str:
+        """Returns a fully-qualified mirroring_endpoint_group string."""
+        return "projects/{project}/locations/{location}/mirroringEndpointGroups/{mirroring_endpoint_group}".format(
+            project=project,
+            location=location,
+            mirroring_endpoint_group=mirroring_endpoint_group,
+        )
+
+    @staticmethod
+    def parse_mirroring_endpoint_group_path(path: str) -> Dict[str, str]:
+        """Parses a mirroring_endpoint_group path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/mirroringEndpointGroups/(?P<mirroring_endpoint_group>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def mirroring_endpoint_group_association_path(
+        project: str,
+        location: str,
+        mirroring_endpoint_group_association: str,
+    ) -> str:
+        """Returns a fully-qualified mirroring_endpoint_group_association string."""
+        return "projects/{project}/locations/{location}/mirroringEndpointGroupAssociations/{mirroring_endpoint_group_association}".format(
+            project=project,
+            location=location,
+            mirroring_endpoint_group_association=mirroring_endpoint_group_association,
+        )
+
+    @staticmethod
+    def parse_mirroring_endpoint_group_association_path(path: str) -> Dict[str, str]:
+        """Parses a mirroring_endpoint_group_association path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/mirroringEndpointGroupAssociations/(?P<mirroring_endpoint_group_association>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
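+    # Illustrative usage of the path helpers above and below (the resource IDs
+    # here are made-up placeholders, not values taken from this API):
+    #
+    #   MirroringClient.mirroring_endpoint_group_path(
+    #       "my-project", "global", "my-endpoint-group"
+    #   )
+    #   # -> "projects/my-project/locations/global/mirroringEndpointGroups/my-endpoint-group"
+    #
+    #   MirroringClient.parse_mirroring_endpoint_group_path(
+    #       "projects/my-project/locations/global/mirroringEndpointGroups/my-endpoint-group"
+    #   )
+    #   # -> {"project": "my-project", "location": "global",
+    #   #     "mirroring_endpoint_group": "my-endpoint-group"}
+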
+    @staticmethod
+    def network_path(
+        project: str,
+        network: str,
+    ) -> str:
+        """Returns a fully-qualified network string."""
+        return "projects/{project}/global/networks/{network}".format(
+            project=project,
+            network=network,
+        )
+
+    @staticmethod
+    def parse_network_path(path: str) -> Dict[str, str]:
+        """Parses a network path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = MirroringClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = MirroringClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = MirroringClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = MirroringClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, MirroringTransport, Callable[..., MirroringTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the mirroring client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MirroringTransport,Callable[..., MirroringTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MirroringTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MirroringClient._read_environment_variables() + self._client_cert_source = MirroringClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = MirroringClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, MirroringTransport) + if transport_provided: + # transport is a MirroringTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(MirroringTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or MirroringClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[MirroringTransport], Callable[..., MirroringTransport] + ] = ( + MirroringClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MirroringTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.MirroringClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "credentialsType": None, + }, + ) + + def list_mirroring_endpoint_groups( + self, + request: Optional[ + Union[mirroring.ListMirroringEndpointGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMirroringEndpointGroupsPager: + r"""Lists endpoint groups in a given project and + location. See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_mirroring_endpoint_groups(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringEndpointGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_endpoint_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsRequest, dict]): + The request object. Request message for + ListMirroringEndpointGroups. + parent (str): + Required. The parent, which owns this collection of + endpoint groups. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringEndpointGroupsPager: + Response message for + ListMirroringEndpointGroups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.ListMirroringEndpointGroupsRequest): + request = mirroring.ListMirroringEndpointGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_mirroring_endpoint_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
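+        # Iterating the pager yields MirroringEndpointGroup results and, whenever the
+        # service returns a next_page_token, transparently re-invokes the wrapped RPC
+        # (with the same retry, timeout, and metadata) to fetch the following page.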
+ response = pagers.ListMirroringEndpointGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_mirroring_endpoint_group( + self, + request: Optional[ + Union[mirroring.GetMirroringEndpointGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringEndpointGroup: + r"""Gets a specific endpoint group. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringEndpointGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_mirroring_endpoint_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetMirroringEndpointGroupRequest, dict]): + The request object. Request message for + GetMirroringEndpointGroup. + name (str): + Required. The name of the endpoint group to retrieve. + Format: + projects/{project}/locations/{location}/mirroringEndpointGroups/{mirroring_endpoint_group} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup: + An endpoint group is a consumer + frontend for a deployment group + (backend). In order to configure + mirroring for a network, consumers must + create: + + - An association between their network + and the endpoint group. + - A security profile that points to the + endpoint group. + - A mirroring rule that references the + security profile (group). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, mirroring.GetMirroringEndpointGroupRequest): + request = mirroring.GetMirroringEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_mirroring_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_mirroring_endpoint_group( + self, + request: Optional[ + Union[mirroring.CreateMirroringEndpointGroupRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + mirroring_endpoint_group: Optional[mirroring.MirroringEndpointGroup] = None, + mirroring_endpoint_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates an endpoint group in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateMirroringEndpointGroupRequest( + parent="parent_value", + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + # Make the request + operation = client.create_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateMirroringEndpointGroupRequest, dict]): + The request object. Request message for + CreateMirroringEndpointGroup. + parent (str): + Required. The parent resource where + this endpoint group will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_endpoint_group (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup): + Required. The endpoint group to + create. + + This corresponds to the ``mirroring_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_endpoint_group_id (str): + Required. The ID to use for the + endpoint group, which will become the + final component of the endpoint group's + resource name. 
+ + This corresponds to the ``mirroring_endpoint_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup` An endpoint group is a consumer frontend for a deployment group (backend). + In order to configure mirroring for a network, + consumers must create: - An association between their + network and the endpoint group. - A security profile + that points to the endpoint group. - A mirroring rule + that references the security profile (group). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + mirroring_endpoint_group, + mirroring_endpoint_group_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.CreateMirroringEndpointGroupRequest): + request = mirroring.CreateMirroringEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if mirroring_endpoint_group is not None: + request.mirroring_endpoint_group = mirroring_endpoint_group + if mirroring_endpoint_group_id is not None: + request.mirroring_endpoint_group_id = mirroring_endpoint_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_mirroring_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + mirroring.MirroringEndpointGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
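+        # Callers typically block on this future, e.g. (illustrative)
+        # endpoint_group = response.result(), which returns the created
+        # MirroringEndpointGroup once the long-running operation finishes.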
+ return response + + def update_mirroring_endpoint_group( + self, + request: Optional[ + Union[mirroring.UpdateMirroringEndpointGroupRequest, dict] + ] = None, + *, + mirroring_endpoint_group: Optional[mirroring.MirroringEndpointGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an endpoint group. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateMirroringEndpointGroupRequest( + ) + + # Make the request + operation = client.update_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateMirroringEndpointGroupRequest, dict]): + The request object. Request message for + UpdateMirroringEndpointGroup. + mirroring_endpoint_group (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup): + Required. The endpoint group to + update. + + This corresponds to the ``mirroring_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are + specified relative to the endpoint group (e.g. + ``description``; *not* + ``mirroring_endpoint_group.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup` An endpoint group is a consumer frontend for a deployment group (backend). + In order to configure mirroring for a network, + consumers must create: - An association between their + network and the endpoint group. - A security profile + that points to the endpoint group. - A mirroring rule + that references the security profile (group). + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [mirroring_endpoint_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.UpdateMirroringEndpointGroupRequest): + request = mirroring.UpdateMirroringEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if mirroring_endpoint_group is not None: + request.mirroring_endpoint_group = mirroring_endpoint_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_mirroring_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "mirroring_endpoint_group.name", + request.mirroring_endpoint_group.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + mirroring.MirroringEndpointGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_mirroring_endpoint_group( + self, + request: Optional[ + Union[mirroring.DeleteMirroringEndpointGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes an endpoint group. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringEndpointGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteMirroringEndpointGroupRequest, dict]): + The request object. Request message for + DeleteMirroringEndpointGroup. + name (str): + Required. 
The endpoint group to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.DeleteMirroringEndpointGroupRequest): + request = mirroring.DeleteMirroringEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_mirroring_endpoint_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_mirroring_endpoint_group_associations( + self, + request: Optional[ + Union[mirroring.ListMirroringEndpointGroupAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMirroringEndpointGroupAssociationsPager: + r"""Lists associations in a given project and location. + See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_mirroring_endpoint_group_associations(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringEndpointGroupAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_endpoint_group_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsRequest, dict]): + The request object. Request message for + ListMirroringEndpointGroupAssociations. + parent (str): + Required. The parent, which owns this collection of + associations. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringEndpointGroupAssociationsPager: + Response message for + ListMirroringEndpointGroupAssociations. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, mirroring.ListMirroringEndpointGroupAssociationsRequest + ): + request = mirroring.ListMirroringEndpointGroupAssociationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_mirroring_endpoint_group_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
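+        # From the caller's side, the pager built below can be consumed with a
+        # plain for-loop; the parent value is a placeholder:
+        #
+        #   for association in client.list_mirroring_endpoint_group_associations(
+        #       parent="projects/my-project/locations/global",
+        #   ):
+        #       print(association)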
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMirroringEndpointGroupAssociationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_mirroring_endpoint_group_association( + self, + request: Optional[ + Union[mirroring.GetMirroringEndpointGroupAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringEndpointGroupAssociation: + r"""Gets a specific association. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_mirroring_endpoint_group_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetMirroringEndpointGroupAssociationRequest, dict]): + The request object. Request message for + GetMirroringEndpointGroupAssociation. + name (str): + Required. The name of the association to retrieve. + Format: + projects/{project}/locations/{location}/mirroringEndpointGroupAssociations/{mirroring_endpoint_group_association} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation: + An endpoint group association + represents a link between a network and + an endpoint group in the organization. + + Creating an association creates the + networking infrastructure linking the + network to the endpoint group, but does + not enable mirroring by itself. To + enable mirroring, the user must also + create a network firewall policy + containing mirroring rules and associate + it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, mirroring.GetMirroringEndpointGroupAssociationRequest + ): + request = mirroring.GetMirroringEndpointGroupAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_mirroring_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_mirroring_endpoint_group_association( + self, + request: Optional[ + Union[mirroring.CreateMirroringEndpointGroupAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + mirroring_endpoint_group_association: Optional[ + mirroring.MirroringEndpointGroupAssociation + ] = None, + mirroring_endpoint_group_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates an association in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateMirroringEndpointGroupAssociationRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateMirroringEndpointGroupAssociationRequest, dict]): + The request object. Request message for + CreateMirroringEndpointGroupAssociation. + parent (str): + Required. The parent resource where + this association will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ mirroring_endpoint_group_association (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation): + Required. The association to create. + This corresponds to the ``mirroring_endpoint_group_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_endpoint_group_association_id (str): + Optional. The ID to use for the new + association, which will become the final + component of the endpoint group's + resource name. If not provided, the + server will generate a unique ID. + + This corresponds to the ``mirroring_endpoint_group_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation` An endpoint group association represents a link between a network and an + endpoint group in the organization. + + Creating an association creates the networking + infrastructure linking the network to the endpoint + group, but does not enable mirroring by itself. To + enable mirroring, the user must also create a network + firewall policy containing mirroring rules and + associate it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + mirroring_endpoint_group_association, + mirroring_endpoint_group_association_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, mirroring.CreateMirroringEndpointGroupAssociationRequest + ): + request = mirroring.CreateMirroringEndpointGroupAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if mirroring_endpoint_group_association is not None: + request.mirroring_endpoint_group_association = ( + mirroring_endpoint_group_association + ) + if mirroring_endpoint_group_association_id is not None: + request.mirroring_endpoint_group_association_id = ( + mirroring_endpoint_group_association_id + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_mirroring_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + mirroring.MirroringEndpointGroupAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_mirroring_endpoint_group_association( + self, + request: Optional[ + Union[mirroring.UpdateMirroringEndpointGroupAssociationRequest, dict] + ] = None, + *, + mirroring_endpoint_group_association: Optional[ + mirroring.MirroringEndpointGroupAssociation + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an association. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateMirroringEndpointGroupAssociationRequest( + ) + + # Make the request + operation = client.update_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateMirroringEndpointGroupAssociationRequest, dict]): + The request object. Request message for + UpdateMirroringEndpointGroupAssociation. + mirroring_endpoint_group_association (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation): + Required. The association to update. + This corresponds to the ``mirroring_endpoint_group_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are + specified relative to the association (e.g. + ``description``; *not* + ``mirroring_endpoint_group_association.description``). + See https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation` An endpoint group association represents a link between a network and an + endpoint group in the organization. + + Creating an association creates the networking + infrastructure linking the network to the endpoint + group, but does not enable mirroring by itself. To + enable mirroring, the user must also create a network + firewall policy containing mirroring rules and + associate it with the network. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [mirroring_endpoint_group_association, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, mirroring.UpdateMirroringEndpointGroupAssociationRequest + ): + request = mirroring.UpdateMirroringEndpointGroupAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if mirroring_endpoint_group_association is not None: + request.mirroring_endpoint_group_association = ( + mirroring_endpoint_group_association + ) + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_mirroring_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "mirroring_endpoint_group_association.name", + request.mirroring_endpoint_group_association.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + mirroring.MirroringEndpointGroupAssociation, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_mirroring_endpoint_group_association( + self, + request: Optional[ + Union[mirroring.DeleteMirroringEndpointGroupAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes an association. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteMirroringEndpointGroupAssociationRequest, dict]): + The request object. Request message for + DeleteMirroringEndpointGroupAssociation. + name (str): + Required. The association to delete. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, mirroring.DeleteMirroringEndpointGroupAssociationRequest + ): + request = mirroring.DeleteMirroringEndpointGroupAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_mirroring_endpoint_group_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
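+        # From the caller's side, a minimal sketch of deleting an association by
+        # name and blocking until the long-running operation completes; the
+        # resource name is a placeholder:
+        #
+        #   client.delete_mirroring_endpoint_group_association(
+        #       name="projects/my-project/locations/global/mirroringEndpointGroupAssociations/my-association",
+        #   ).result()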
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_mirroring_deployment_groups( + self, + request: Optional[ + Union[mirroring.ListMirroringDeploymentGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMirroringDeploymentGroupsPager: + r"""Lists deployment groups in a given project and + location. See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_mirroring_deployment_groups(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_deployment_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsRequest, dict]): + The request object. Request message for + ListMirroringDeploymentGroups. + parent (str): + Required. The parent, which owns this collection of + deployment groups. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringDeploymentGroupsPager: + Response message for + ListMirroringDeploymentGroups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.ListMirroringDeploymentGroupsRequest): + request = mirroring.ListMirroringDeploymentGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_mirroring_deployment_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMirroringDeploymentGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_mirroring_deployment_group( + self, + request: Optional[ + Union[mirroring.GetMirroringDeploymentGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringDeploymentGroup: + r"""Gets a specific deployment group. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_mirroring_deployment_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetMirroringDeploymentGroupRequest, dict]): + The request object. Request message for + GetMirroringDeploymentGroup. + name (str): + Required. The name of the deployment group to retrieve. + Format: + projects/{project}/locations/{location}/mirroringDeploymentGroups/{mirroring_deployment_group} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup: + A deployment group aggregates many + zonal mirroring backends (deployments) + into a single global mirroring service. + Consumers can connect this service using + an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.GetMirroringDeploymentGroupRequest): + request = mirroring.GetMirroringDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_mirroring_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_mirroring_deployment_group( + self, + request: Optional[ + Union[mirroring.CreateMirroringDeploymentGroupRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + mirroring_deployment_group: Optional[mirroring.MirroringDeploymentGroup] = None, + mirroring_deployment_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a deployment group in a given project and + location. See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + mirroring_deployment_group = network_security_v1alpha1.MirroringDeploymentGroup() + mirroring_deployment_group.network = "network_value" + + request = network_security_v1alpha1.CreateMirroringDeploymentGroupRequest( + parent="parent_value", + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + mirroring_deployment_group=mirroring_deployment_group, + ) + + # Make the request + operation = client.create_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateMirroringDeploymentGroupRequest, dict]): + The request object. Request message for + CreateMirroringDeploymentGroup. + parent (str): + Required. The parent resource where + this deployment group will be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_deployment_group (google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup): + Required. The deployment group to + create. + + This corresponds to the ``mirroring_deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_deployment_group_id (str): + Required. The ID to use for the new + deployment group, which will become the + final component of the deployment + group's resource name. + + This corresponds to the ``mirroring_deployment_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup` A deployment group aggregates many zonal mirroring backends (deployments) + into a single global mirroring service. Consumers can + connect this service using an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + mirroring_deployment_group, + mirroring_deployment_group_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.CreateMirroringDeploymentGroupRequest): + request = mirroring.CreateMirroringDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if mirroring_deployment_group is not None: + request.mirroring_deployment_group = mirroring_deployment_group + if mirroring_deployment_group_id is not None: + request.mirroring_deployment_group_id = mirroring_deployment_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_mirroring_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + mirroring.MirroringDeploymentGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_mirroring_deployment_group( + self, + request: Optional[ + Union[mirroring.UpdateMirroringDeploymentGroupRequest, dict] + ] = None, + *, + mirroring_deployment_group: Optional[mirroring.MirroringDeploymentGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a deployment group. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + mirroring_deployment_group = network_security_v1alpha1.MirroringDeploymentGroup() + mirroring_deployment_group.network = "network_value" + + request = network_security_v1alpha1.UpdateMirroringDeploymentGroupRequest( + mirroring_deployment_group=mirroring_deployment_group, + ) + + # Make the request + operation = client.update_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateMirroringDeploymentGroupRequest, dict]): + The request object. Request message for + UpdateMirroringDeploymentGroup. + mirroring_deployment_group (google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup): + Required. The deployment group to + update. 
+ + This corresponds to the ``mirroring_deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are + specified relative to the deployment group (e.g. + ``description``; *not* + ``mirroring_deployment_group.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup` A deployment group aggregates many zonal mirroring backends (deployments) + into a single global mirroring service. Consumers can + connect this service using an endpoint group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [mirroring_deployment_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.UpdateMirroringDeploymentGroupRequest): + request = mirroring.UpdateMirroringDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if mirroring_deployment_group is not None: + request.mirroring_deployment_group = mirroring_deployment_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_mirroring_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "mirroring_deployment_group.name", + request.mirroring_deployment_group.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + mirroring.MirroringDeploymentGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
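+        # From the caller's side, a minimal sketch of updating a single field via
+        # a field mask; as noted in the docstring, mask paths are relative to the
+        # deployment group itself. All resource values below are placeholders:
+        #
+        #   updated_group = network_security_v1alpha1.MirroringDeploymentGroup(
+        #       name="projects/my-project/locations/global/mirroringDeploymentGroups/my-group",
+        #       description="Updated description",
+        #   )
+        #   operation = client.update_mirroring_deployment_group(
+        #       mirroring_deployment_group=updated_group,
+        #       update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+        #   )
+        #   deployment_group = operation.result()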
+ return response + + def delete_mirroring_deployment_group( + self, + request: Optional[ + Union[mirroring.DeleteMirroringDeploymentGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a deployment group. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteMirroringDeploymentGroupRequest, dict]): + The request object. Request message for + DeleteMirroringDeploymentGroup. + name (str): + Required. The deployment group to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, mirroring.DeleteMirroringDeploymentGroupRequest): + request = mirroring.DeleteMirroringDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_mirroring_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_mirroring_deployments( + self, + request: Optional[ + Union[mirroring.ListMirroringDeploymentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMirroringDeploymentsPager: + r"""Lists deployments in a given project and location. + See https://google.aip.dev/132. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_mirroring_deployments(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_deployments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsRequest, dict]): + The request object. Request message for + ListMirroringDeployments. + parent (str): + Required. The parent, which owns this collection of + deployments. Example: + ``projects/123456789/locations/us-central1-a``. See + https://google.aip.dev/132 for more details. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringDeploymentsPager: + Response message for + ListMirroringDeployments. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.ListMirroringDeploymentsRequest): + request = mirroring.ListMirroringDeploymentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_mirroring_deployments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMirroringDeploymentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_mirroring_deployment( + self, + request: Optional[Union[mirroring.GetMirroringDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringDeployment: + r"""Gets a specific deployment. + See https://google.aip.dev/131. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_mirroring_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetMirroringDeploymentRequest, dict]): + The request object. Request message for + GetMirroringDeployment. 
+ name (str): + Required. The name of the deployment to retrieve. + Format: + projects/{project}/locations/{location}/mirroringDeployments/{mirroring_deployment} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.MirroringDeployment: + A deployment represents a zonal + mirroring backend ready to accept + GENEVE-encapsulated replica traffic, + e.g. a zonal instance group fronted by + an internal passthrough load balancer. + Deployments are always part of a global + deployment group which represents a + global mirroring service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.GetMirroringDeploymentRequest): + request = mirroring.GetMirroringDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_mirroring_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_mirroring_deployment( + self, + request: Optional[ + Union[mirroring.CreateMirroringDeploymentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + mirroring_deployment: Optional[mirroring.MirroringDeployment] = None, + mirroring_deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a deployment in a given project and location. + See https://google.aip.dev/133. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + mirroring_deployment = network_security_v1alpha1.MirroringDeployment() + mirroring_deployment.forwarding_rule = "forwarding_rule_value" + mirroring_deployment.mirroring_deployment_group = "mirroring_deployment_group_value" + + request = network_security_v1alpha1.CreateMirroringDeploymentRequest( + parent="parent_value", + mirroring_deployment_id="mirroring_deployment_id_value", + mirroring_deployment=mirroring_deployment, + ) + + # Make the request + operation = client.create_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateMirroringDeploymentRequest, dict]): + The request object. Request message for + CreateMirroringDeployment. + parent (str): + Required. The parent resource where + this deployment will be created. Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_deployment (google.cloud.network_security_v1alpha1.types.MirroringDeployment): + Required. The deployment to create. + This corresponds to the ``mirroring_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mirroring_deployment_id (str): + Required. The ID to use for the new + deployment, which will become the final + component of the deployment's resource + name. + + This corresponds to the ``mirroring_deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringDeployment` A deployment represents a zonal mirroring backend ready to accept + GENEVE-encapsulated replica traffic, e.g. a zonal + instance group fronted by an internal passthrough + load balancer. Deployments are always part of a + global deployment group which represents a global + mirroring service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, mirroring_deployment, mirroring_deployment_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.CreateMirroringDeploymentRequest): + request = mirroring.CreateMirroringDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if mirroring_deployment is not None: + request.mirroring_deployment = mirroring_deployment + if mirroring_deployment_id is not None: + request.mirroring_deployment_id = mirroring_deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_mirroring_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + mirroring.MirroringDeployment, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_mirroring_deployment( + self, + request: Optional[ + Union[mirroring.UpdateMirroringDeploymentRequest, dict] + ] = None, + *, + mirroring_deployment: Optional[mirroring.MirroringDeployment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a deployment. + See https://google.aip.dev/134. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + mirroring_deployment = network_security_v1alpha1.MirroringDeployment() + mirroring_deployment.forwarding_rule = "forwarding_rule_value" + mirroring_deployment.mirroring_deployment_group = "mirroring_deployment_group_value" + + request = network_security_v1alpha1.UpdateMirroringDeploymentRequest( + mirroring_deployment=mirroring_deployment, + ) + + # Make the request + operation = client.update_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateMirroringDeploymentRequest, dict]): + The request object. Request message for + UpdateMirroringDeployment. + mirroring_deployment (google.cloud.network_security_v1alpha1.types.MirroringDeployment): + Required. The deployment to update. 
+ This corresponds to the ``mirroring_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are + specified relative to the deployment (e.g. + ``description``; *not* + ``mirroring_deployment.description``). See + https://google.aip.dev/161 for more details. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.MirroringDeployment` A deployment represents a zonal mirroring backend ready to accept + GENEVE-encapsulated replica traffic, e.g. a zonal + instance group fronted by an internal passthrough + load balancer. Deployments are always part of a + global deployment group which represents a global + mirroring service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [mirroring_deployment, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.UpdateMirroringDeploymentRequest): + request = mirroring.UpdateMirroringDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if mirroring_deployment is not None: + request.mirroring_deployment = mirroring_deployment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_mirroring_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("mirroring_deployment.name", request.mirroring_deployment.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + mirroring.MirroringDeployment, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
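+ # Illustrative consumption of the returned future with a bounded wait;
+ # ``client`` and ``deployment`` are assumptions:
+ #
+ #     op = client.update_mirroring_deployment(
+ #         mirroring_deployment=deployment,
+ #         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+ #     )
+ #     updated = op.result(timeout=300)  # raises if not done within 300 seconds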
+ return response + + def delete_mirroring_deployment( + self, + request: Optional[ + Union[mirroring.DeleteMirroringDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a deployment. + See https://google.aip.dev/135. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteMirroringDeploymentRequest, dict]): + The request object. Request message for + DeleteMirroringDeployment. + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, mirroring.DeleteMirroringDeploymentRequest): + request = mirroring.DeleteMirroringDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
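+ # Callers that prefer not to block can poll the returned future instead of
+ # calling ``result()``; the resource name below is an assumption:
+ #
+ #     op = client.delete_mirroring_deployment(
+ #         name="projects/my-project/locations/us-central1-a/mirroringDeployments/my-deployment")
+ #     if not op.done():  # single status refresh, does not wait for completion
+ #         op.add_done_callback(lambda fut: print("deployment deleted"))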
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_mirroring_deployment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MirroringClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
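+ # Illustrative call; the parent path below is an assumption:
+ #
+ #     resp = client.list_operations(
+ #         request={"name": "projects/my-project/locations/global", "page_size": 25})
+ #     for op in resp.operations:
+ #         print(op.name, op.done)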
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. 
A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
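+ # Illustrative call; the location path below is an assumption:
+ #
+ #     loc = client.get_location(
+ #         request={"name": "projects/my-project/locations/us-central1"})
+ #     print(loc.location_id, loc.display_name)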
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("MirroringClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/pagers.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/pagers.py new file mode 100644 index 000000000000..f7cab1cba2ff --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/pagers.py @@ -0,0 +1,677 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
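+ #
+ # A typical way to consume the pagers defined below, sketched with an assumed
+ # ``client`` and parent value:
+ #
+ #     pager = client.list_mirroring_endpoint_groups(
+ #         parent="projects/my-project/locations/global")
+ #     for endpoint_group in pager:  # additional pages are fetched lazily
+ #         print(endpoint_group.name)
+ #
+ # Whole responses can also be walked via the ``pages`` property of a pager.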
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.network_security_v1alpha1.types import mirroring + + +class ListMirroringEndpointGroupsPager: + """A pager for iterating through ``list_mirroring_endpoint_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``mirroring_endpoint_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMirroringEndpointGroups`` requests and continue to iterate + through the ``mirroring_endpoint_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., mirroring.ListMirroringEndpointGroupsResponse], + request: mirroring.ListMirroringEndpointGroupsRequest, + response: mirroring.ListMirroringEndpointGroupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = mirroring.ListMirroringEndpointGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[mirroring.ListMirroringEndpointGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[mirroring.MirroringEndpointGroup]: + for page in self.pages: + yield from page.mirroring_endpoint_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMirroringEndpointGroupsAsyncPager: + """A pager for iterating through ``list_mirroring_endpoint_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``mirroring_endpoint_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMirroringEndpointGroups`` requests and continue to iterate + through the ``mirroring_endpoint_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[mirroring.ListMirroringEndpointGroupsResponse]], + request: mirroring.ListMirroringEndpointGroupsRequest, + response: mirroring.ListMirroringEndpointGroupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = mirroring.ListMirroringEndpointGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[mirroring.ListMirroringEndpointGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[mirroring.MirroringEndpointGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.mirroring_endpoint_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMirroringEndpointGroupAssociationsPager: + """A pager for iterating through ``list_mirroring_endpoint_group_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``mirroring_endpoint_group_associations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMirroringEndpointGroupAssociations`` requests and continue to iterate + through the ``mirroring_endpoint_group_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., mirroring.ListMirroringEndpointGroupAssociationsResponse], + request: mirroring.ListMirroringEndpointGroupAssociationsRequest, + response: mirroring.ListMirroringEndpointGroupAssociationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = mirroring.ListMirroringEndpointGroupAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[mirroring.ListMirroringEndpointGroupAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[mirroring.MirroringEndpointGroupAssociation]: + for page in self.pages: + yield from page.mirroring_endpoint_group_associations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMirroringEndpointGroupAssociationsAsyncPager: + """A pager for iterating through ``list_mirroring_endpoint_group_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``mirroring_endpoint_group_associations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMirroringEndpointGroupAssociations`` requests and continue to iterate + through the ``mirroring_endpoint_group_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[mirroring.ListMirroringEndpointGroupAssociationsResponse] + ], + request: mirroring.ListMirroringEndpointGroupAssociationsRequest, + response: mirroring.ListMirroringEndpointGroupAssociationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = mirroring.ListMirroringEndpointGroupAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[mirroring.ListMirroringEndpointGroupAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[mirroring.MirroringEndpointGroupAssociation]: + async def async_generator(): + async for page in self.pages: + for response in page.mirroring_endpoint_group_associations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMirroringDeploymentGroupsPager: + """A pager for iterating through ``list_mirroring_deployment_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``mirroring_deployment_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMirroringDeploymentGroups`` requests and continue to iterate + through the ``mirroring_deployment_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., mirroring.ListMirroringDeploymentGroupsResponse], + request: mirroring.ListMirroringDeploymentGroupsRequest, + response: mirroring.ListMirroringDeploymentGroupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = mirroring.ListMirroringDeploymentGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[mirroring.ListMirroringDeploymentGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[mirroring.MirroringDeploymentGroup]: + for page in self.pages: + yield from page.mirroring_deployment_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMirroringDeploymentGroupsAsyncPager: + """A pager for iterating through ``list_mirroring_deployment_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``mirroring_deployment_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMirroringDeploymentGroups`` requests and continue to iterate + through the ``mirroring_deployment_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[mirroring.ListMirroringDeploymentGroupsResponse] + ], + request: mirroring.ListMirroringDeploymentGroupsRequest, + response: mirroring.ListMirroringDeploymentGroupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = mirroring.ListMirroringDeploymentGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[mirroring.ListMirroringDeploymentGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[mirroring.MirroringDeploymentGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.mirroring_deployment_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMirroringDeploymentsPager: + """A pager for iterating through ``list_mirroring_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``mirroring_deployments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMirroringDeployments`` requests and continue to iterate + through the ``mirroring_deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., mirroring.ListMirroringDeploymentsResponse], + request: mirroring.ListMirroringDeploymentsRequest, + response: mirroring.ListMirroringDeploymentsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = mirroring.ListMirroringDeploymentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[mirroring.ListMirroringDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[mirroring.MirroringDeployment]: + for page in self.pages: + yield from page.mirroring_deployments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMirroringDeploymentsAsyncPager: + """A pager for iterating through ``list_mirroring_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``mirroring_deployments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMirroringDeployments`` requests and continue to iterate + through the ``mirroring_deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[mirroring.ListMirroringDeploymentsResponse]], + request: mirroring.ListMirroringDeploymentsRequest, + response: mirroring.ListMirroringDeploymentsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = mirroring.ListMirroringDeploymentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[mirroring.ListMirroringDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[mirroring.MirroringDeployment]: + async def async_generator(): + async for page in self.pages: + for response in page.mirroring_deployments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/README.rst b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/README.rst new file mode 100644 index 000000000000..6dac7546731c --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`MirroringTransport` is the ABC for all transports. +- public child `MirroringGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `MirroringGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseMirroringRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `MirroringRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/__init__.py new file mode 100644 index 000000000000..c7eb043071e2 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MirroringTransport +from .grpc import MirroringGrpcTransport +from .grpc_asyncio import MirroringGrpcAsyncIOTransport +from .rest import MirroringRestInterceptor, MirroringRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[MirroringTransport]] +_transport_registry["grpc"] = MirroringGrpcTransport +_transport_registry["grpc_asyncio"] = MirroringGrpcAsyncIOTransport +_transport_registry["rest"] = MirroringRestTransport + +__all__ = ( + "MirroringTransport", + "MirroringGrpcTransport", + "MirroringGrpcAsyncIOTransport", + "MirroringRestTransport", + "MirroringRestInterceptor", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/base.py new file mode 100644 index 000000000000..67aa1296e665 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/base.py @@ -0,0 +1,591 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version +from google.cloud.network_security_v1alpha1.types import mirroring + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class MirroringTransport(abc.ABC): + """Abstract transport class for Mirroring.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "networksecurity.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
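# (Editor's note -- descriptive comment, not emitted by the generator.) Each
# entry in the dict below passes the raw transport callable through
# ``gapic_v1.method.wrap_method``, which returns a callable that applies the
# declared defaults (here ``default_timeout=None``, i.e. no default deadline)
# and attaches the ``client_info`` user-agent metadata, while still accepting
# per-call ``retry=``, ``timeout=`` and ``metadata=`` overrides. A hedged usage
# sketch, with ``request`` standing in for a prepared request object:
#
#     wrapped = self._wrapped_methods[self.get_mirroring_endpoint_group]
#     wrapped(request, timeout=30.0)   # per-call timeout overrides the default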
+ self._wrapped_methods = { + self.list_mirroring_endpoint_groups: gapic_v1.method.wrap_method( + self.list_mirroring_endpoint_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_mirroring_endpoint_group: gapic_v1.method.wrap_method( + self.get_mirroring_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.create_mirroring_endpoint_group: gapic_v1.method.wrap_method( + self.create_mirroring_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.update_mirroring_endpoint_group: gapic_v1.method.wrap_method( + self.update_mirroring_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_mirroring_endpoint_group: gapic_v1.method.wrap_method( + self.delete_mirroring_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.list_mirroring_endpoint_group_associations: gapic_v1.method.wrap_method( + self.list_mirroring_endpoint_group_associations, + default_timeout=None, + client_info=client_info, + ), + self.get_mirroring_endpoint_group_association: gapic_v1.method.wrap_method( + self.get_mirroring_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.create_mirroring_endpoint_group_association: gapic_v1.method.wrap_method( + self.create_mirroring_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.update_mirroring_endpoint_group_association: gapic_v1.method.wrap_method( + self.update_mirroring_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.delete_mirroring_endpoint_group_association: gapic_v1.method.wrap_method( + self.delete_mirroring_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.list_mirroring_deployment_groups: gapic_v1.method.wrap_method( + self.list_mirroring_deployment_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_mirroring_deployment_group: gapic_v1.method.wrap_method( + self.get_mirroring_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.create_mirroring_deployment_group: gapic_v1.method.wrap_method( + self.create_mirroring_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.update_mirroring_deployment_group: gapic_v1.method.wrap_method( + self.update_mirroring_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_mirroring_deployment_group: gapic_v1.method.wrap_method( + self.delete_mirroring_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.list_mirroring_deployments: gapic_v1.method.wrap_method( + self.list_mirroring_deployments, + default_timeout=None, + client_info=client_info, + ), + self.get_mirroring_deployment: gapic_v1.method.wrap_method( + self.get_mirroring_deployment, + default_timeout=None, + client_info=client_info, + ), + self.create_mirroring_deployment: gapic_v1.method.wrap_method( + self.create_mirroring_deployment, + default_timeout=None, + client_info=client_info, + ), + self.update_mirroring_deployment: gapic_v1.method.wrap_method( + self.update_mirroring_deployment, + default_timeout=None, + client_info=client_info, + ), + self.delete_mirroring_deployment: gapic_v1.method.wrap_method( + self.delete_mirroring_deployment, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: 
gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_mirroring_endpoint_groups( + self, + ) -> Callable[ + [mirroring.ListMirroringEndpointGroupsRequest], + Union[ + mirroring.ListMirroringEndpointGroupsResponse, + Awaitable[mirroring.ListMirroringEndpointGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.GetMirroringEndpointGroupRequest], + Union[ + mirroring.MirroringEndpointGroup, + Awaitable[mirroring.MirroringEndpointGroup], + ], + ]: + raise NotImplementedError() + + @property + def create_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.CreateMirroringEndpointGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.UpdateMirroringEndpointGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.DeleteMirroringEndpointGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_mirroring_endpoint_group_associations( + self, + ) -> Callable[ + [mirroring.ListMirroringEndpointGroupAssociationsRequest], + Union[ + mirroring.ListMirroringEndpointGroupAssociationsResponse, + Awaitable[mirroring.ListMirroringEndpointGroupAssociationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.GetMirroringEndpointGroupAssociationRequest], + Union[ + mirroring.MirroringEndpointGroupAssociation, + Awaitable[mirroring.MirroringEndpointGroupAssociation], + ], + ]: + raise NotImplementedError() + + @property + def create_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.CreateMirroringEndpointGroupAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise 
NotImplementedError() + + @property + def update_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.UpdateMirroringEndpointGroupAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.DeleteMirroringEndpointGroupAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_mirroring_deployment_groups( + self, + ) -> Callable[ + [mirroring.ListMirroringDeploymentGroupsRequest], + Union[ + mirroring.ListMirroringDeploymentGroupsResponse, + Awaitable[mirroring.ListMirroringDeploymentGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.GetMirroringDeploymentGroupRequest], + Union[ + mirroring.MirroringDeploymentGroup, + Awaitable[mirroring.MirroringDeploymentGroup], + ], + ]: + raise NotImplementedError() + + @property + def create_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.CreateMirroringDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.UpdateMirroringDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.DeleteMirroringDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_mirroring_deployments( + self, + ) -> Callable[ + [mirroring.ListMirroringDeploymentsRequest], + Union[ + mirroring.ListMirroringDeploymentsResponse, + Awaitable[mirroring.ListMirroringDeploymentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.GetMirroringDeploymentRequest], + Union[mirroring.MirroringDeployment, Awaitable[mirroring.MirroringDeployment]], + ]: + raise NotImplementedError() + + @property + def create_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.CreateMirroringDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.UpdateMirroringDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.DeleteMirroringDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> 
Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MirroringTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/grpc.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/grpc.py new file mode 100644 index 000000000000..9c61bdad78c9 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/grpc.py @@ -0,0 +1,1179 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import mirroring + +from .base import DEFAULT_CLIENT_INFO, MirroringTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class MirroringGrpcTransport(MirroringTransport): + """gRPC backend transport for Mirroring. + + PM2 is the "out-of-band" flavor of the Network Security + Integrations product. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_mirroring_endpoint_groups( + self, + ) -> Callable[ + [mirroring.ListMirroringEndpointGroupsRequest], + mirroring.ListMirroringEndpointGroupsResponse, + ]: + r"""Return a callable for the list mirroring endpoint groups method over gRPC. + + Lists endpoint groups in a given project and + location. See https://google.aip.dev/132. + + Returns: + Callable[[~.ListMirroringEndpointGroupsRequest], + ~.ListMirroringEndpointGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_mirroring_endpoint_groups" not in self._stubs: + self._stubs[ + "list_mirroring_endpoint_groups" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/ListMirroringEndpointGroups", + request_serializer=mirroring.ListMirroringEndpointGroupsRequest.serialize, + response_deserializer=mirroring.ListMirroringEndpointGroupsResponse.deserialize, + ) + return self._stubs["list_mirroring_endpoint_groups"] + + @property + def get_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.GetMirroringEndpointGroupRequest], mirroring.MirroringEndpointGroup + ]: + r"""Return a callable for the get mirroring endpoint group method over gRPC. + + Gets a specific endpoint group. + See https://google.aip.dev/131. 
+ + Returns: + Callable[[~.GetMirroringEndpointGroupRequest], + ~.MirroringEndpointGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_mirroring_endpoint_group" not in self._stubs: + self._stubs[ + "get_mirroring_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/GetMirroringEndpointGroup", + request_serializer=mirroring.GetMirroringEndpointGroupRequest.serialize, + response_deserializer=mirroring.MirroringEndpointGroup.deserialize, + ) + return self._stubs["get_mirroring_endpoint_group"] + + @property + def create_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.CreateMirroringEndpointGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create mirroring endpoint + group method over gRPC. + + Creates an endpoint group in a given project and + location. See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateMirroringEndpointGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_mirroring_endpoint_group" not in self._stubs: + self._stubs[ + "create_mirroring_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/CreateMirroringEndpointGroup", + request_serializer=mirroring.CreateMirroringEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_mirroring_endpoint_group"] + + @property + def update_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.UpdateMirroringEndpointGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update mirroring endpoint + group method over gRPC. + + Updates an endpoint group. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateMirroringEndpointGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_mirroring_endpoint_group" not in self._stubs: + self._stubs[ + "update_mirroring_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/UpdateMirroringEndpointGroup", + request_serializer=mirroring.UpdateMirroringEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_mirroring_endpoint_group"] + + @property + def delete_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.DeleteMirroringEndpointGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete mirroring endpoint + group method over gRPC. + + Deletes an endpoint group. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteMirroringEndpointGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_mirroring_endpoint_group" not in self._stubs: + self._stubs[ + "delete_mirroring_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/DeleteMirroringEndpointGroup", + request_serializer=mirroring.DeleteMirroringEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_mirroring_endpoint_group"] + + @property + def list_mirroring_endpoint_group_associations( + self, + ) -> Callable[ + [mirroring.ListMirroringEndpointGroupAssociationsRequest], + mirroring.ListMirroringEndpointGroupAssociationsResponse, + ]: + r"""Return a callable for the list mirroring endpoint group + associations method over gRPC. + + Lists associations in a given project and location. + See https://google.aip.dev/132. + + Returns: + Callable[[~.ListMirroringEndpointGroupAssociationsRequest], + ~.ListMirroringEndpointGroupAssociationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_mirroring_endpoint_group_associations" not in self._stubs: + self._stubs[ + "list_mirroring_endpoint_group_associations" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/ListMirroringEndpointGroupAssociations", + request_serializer=mirroring.ListMirroringEndpointGroupAssociationsRequest.serialize, + response_deserializer=mirroring.ListMirroringEndpointGroupAssociationsResponse.deserialize, + ) + return self._stubs["list_mirroring_endpoint_group_associations"] + + @property + def get_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.GetMirroringEndpointGroupAssociationRequest], + mirroring.MirroringEndpointGroupAssociation, + ]: + r"""Return a callable for the get mirroring endpoint group + association method over gRPC. + + Gets a specific association. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetMirroringEndpointGroupAssociationRequest], + ~.MirroringEndpointGroupAssociation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_mirroring_endpoint_group_association" not in self._stubs: + self._stubs[ + "get_mirroring_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/GetMirroringEndpointGroupAssociation", + request_serializer=mirroring.GetMirroringEndpointGroupAssociationRequest.serialize, + response_deserializer=mirroring.MirroringEndpointGroupAssociation.deserialize, + ) + return self._stubs["get_mirroring_endpoint_group_association"] + + @property + def create_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.CreateMirroringEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create mirroring endpoint + group association method over gRPC. + + Creates an association in a given project and + location. 
See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateMirroringEndpointGroupAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_mirroring_endpoint_group_association" not in self._stubs: + self._stubs[ + "create_mirroring_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/CreateMirroringEndpointGroupAssociation", + request_serializer=mirroring.CreateMirroringEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_mirroring_endpoint_group_association"] + + @property + def update_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.UpdateMirroringEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update mirroring endpoint + group association method over gRPC. + + Updates an association. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateMirroringEndpointGroupAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_mirroring_endpoint_group_association" not in self._stubs: + self._stubs[ + "update_mirroring_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/UpdateMirroringEndpointGroupAssociation", + request_serializer=mirroring.UpdateMirroringEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_mirroring_endpoint_group_association"] + + @property + def delete_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.DeleteMirroringEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete mirroring endpoint + group association method over gRPC. + + Deletes an association. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteMirroringEndpointGroupAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_mirroring_endpoint_group_association" not in self._stubs: + self._stubs[ + "delete_mirroring_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/DeleteMirroringEndpointGroupAssociation", + request_serializer=mirroring.DeleteMirroringEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_mirroring_endpoint_group_association"] + + @property + def list_mirroring_deployment_groups( + self, + ) -> Callable[ + [mirroring.ListMirroringDeploymentGroupsRequest], + mirroring.ListMirroringDeploymentGroupsResponse, + ]: + r"""Return a callable for the list mirroring deployment + groups method over gRPC. + + Lists deployment groups in a given project and + location. See https://google.aip.dev/132. + + Returns: + Callable[[~.ListMirroringDeploymentGroupsRequest], + ~.ListMirroringDeploymentGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_mirroring_deployment_groups" not in self._stubs: + self._stubs[ + "list_mirroring_deployment_groups" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/ListMirroringDeploymentGroups", + request_serializer=mirroring.ListMirroringDeploymentGroupsRequest.serialize, + response_deserializer=mirroring.ListMirroringDeploymentGroupsResponse.deserialize, + ) + return self._stubs["list_mirroring_deployment_groups"] + + @property + def get_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.GetMirroringDeploymentGroupRequest], + mirroring.MirroringDeploymentGroup, + ]: + r"""Return a callable for the get mirroring deployment group method over gRPC. + + Gets a specific deployment group. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetMirroringDeploymentGroupRequest], + ~.MirroringDeploymentGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_mirroring_deployment_group" not in self._stubs: + self._stubs[ + "get_mirroring_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/GetMirroringDeploymentGroup", + request_serializer=mirroring.GetMirroringDeploymentGroupRequest.serialize, + response_deserializer=mirroring.MirroringDeploymentGroup.deserialize, + ) + return self._stubs["get_mirroring_deployment_group"] + + @property + def create_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.CreateMirroringDeploymentGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create mirroring deployment + group method over gRPC. + + Creates a deployment group in a given project and + location. See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateMirroringDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_mirroring_deployment_group" not in self._stubs: + self._stubs[ + "create_mirroring_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/CreateMirroringDeploymentGroup", + request_serializer=mirroring.CreateMirroringDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_mirroring_deployment_group"] + + @property + def update_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.UpdateMirroringDeploymentGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update mirroring deployment + group method over gRPC. + + Updates a deployment group. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateMirroringDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_mirroring_deployment_group" not in self._stubs: + self._stubs[ + "update_mirroring_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/UpdateMirroringDeploymentGroup", + request_serializer=mirroring.UpdateMirroringDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_mirroring_deployment_group"] + + @property + def delete_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.DeleteMirroringDeploymentGroupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete mirroring deployment + group method over gRPC. + + Deletes a deployment group. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteMirroringDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_mirroring_deployment_group" not in self._stubs: + self._stubs[ + "delete_mirroring_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/DeleteMirroringDeploymentGroup", + request_serializer=mirroring.DeleteMirroringDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_mirroring_deployment_group"] + + @property + def list_mirroring_deployments( + self, + ) -> Callable[ + [mirroring.ListMirroringDeploymentsRequest], + mirroring.ListMirroringDeploymentsResponse, + ]: + r"""Return a callable for the list mirroring deployments method over gRPC. + + Lists deployments in a given project and location. + See https://google.aip.dev/132. + + Returns: + Callable[[~.ListMirroringDeploymentsRequest], + ~.ListMirroringDeploymentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
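+        # Editorial sketch (not generated code): list stubs return a single page
+        # per call, and paging is left to the caller (the wrapping client does
+        # this through its pagers). The ``parent``, ``page_token``,
+        # ``mirroring_deployments`` and ``next_page_token`` fields below follow
+        # AIP-132/AIP-158 conventions and are assumed, not taken from this file:
+        #
+        #     request = mirroring.ListMirroringDeploymentsRequest(parent=parent)
+        #     while True:
+        #         response = transport.list_mirroring_deployments(request)
+        #         for deployment in response.mirroring_deployments:
+        #             ...  # process each deployment
+        #         if not response.next_page_token:
+        #             break
+        #         request.page_token = response.next_page_token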
+ if "list_mirroring_deployments" not in self._stubs: + self._stubs[ + "list_mirroring_deployments" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/ListMirroringDeployments", + request_serializer=mirroring.ListMirroringDeploymentsRequest.serialize, + response_deserializer=mirroring.ListMirroringDeploymentsResponse.deserialize, + ) + return self._stubs["list_mirroring_deployments"] + + @property + def get_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.GetMirroringDeploymentRequest], mirroring.MirroringDeployment + ]: + r"""Return a callable for the get mirroring deployment method over gRPC. + + Gets a specific deployment. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetMirroringDeploymentRequest], + ~.MirroringDeployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_mirroring_deployment" not in self._stubs: + self._stubs["get_mirroring_deployment"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/GetMirroringDeployment", + request_serializer=mirroring.GetMirroringDeploymentRequest.serialize, + response_deserializer=mirroring.MirroringDeployment.deserialize, + ) + return self._stubs["get_mirroring_deployment"] + + @property + def create_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.CreateMirroringDeploymentRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create mirroring deployment method over gRPC. + + Creates a deployment in a given project and location. + See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateMirroringDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_mirroring_deployment" not in self._stubs: + self._stubs[ + "create_mirroring_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/CreateMirroringDeployment", + request_serializer=mirroring.CreateMirroringDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_mirroring_deployment"] + + @property + def update_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.UpdateMirroringDeploymentRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update mirroring deployment method over gRPC. + + Updates a deployment. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateMirroringDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_mirroring_deployment" not in self._stubs: + self._stubs[ + "update_mirroring_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/UpdateMirroringDeployment", + request_serializer=mirroring.UpdateMirroringDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_mirroring_deployment"] + + @property + def delete_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.DeleteMirroringDeploymentRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete mirroring deployment method over gRPC. + + Deletes a deployment. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteMirroringDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_mirroring_deployment" not in self._stubs: + self._stubs[ + "delete_mirroring_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/DeleteMirroringDeployment", + request_serializer=mirroring.DeleteMirroringDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_mirroring_deployment"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
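+        # Editorial sketch (not generated code): the mutation stubs above return
+        # raw ``google.longrunning`` operations, which can be polled by name
+        # through this mixin stub (use a real backoff in practice; assumes
+        # ``import time`` and an already-built ``create_request``):
+        #
+        #     op = transport.create_mirroring_deployment(create_request)
+        #     while not op.done:
+        #         time.sleep(5)
+        #         op = transport.get_operation(
+        #             operations_pb2.GetOperationRequest(name=op.name)
+        #         )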
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
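+        # Editorial sketch (not generated code): the IAM mixin stubs take the
+        # standard ``google.iam.v1`` request messages, where ``resource`` is the
+        # full resource name whose policy is being read or replaced:
+        #
+        #     policy = transport.get_iam_policy(
+        #         iam_policy_pb2.GetIamPolicyRequest(resource=resource_name)
+        #     )
+        #     transport.set_iam_policy(
+        #         iam_policy_pb2.SetIamPolicyRequest(resource=resource_name, policy=policy)
+        #     )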
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MirroringGrpcTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/grpc_asyncio.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/grpc_asyncio.py new file mode 100644 index 000000000000..9e1f30608971 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/grpc_asyncio.py @@ -0,0 +1,1353 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import mirroring + +from .base import DEFAULT_CLIENT_INFO, MirroringTransport +from .grpc import MirroringGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": str(client_call_details.method), + "response": 
grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class MirroringGrpcAsyncIOTransport(MirroringTransport): + """gRPC AsyncIO backend transport for Mirroring. + + PM2 is the "out-of-band" flavor of the Network Security + Integrations product. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. 
A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_mirroring_endpoint_groups( + self, + ) -> Callable[ + [mirroring.ListMirroringEndpointGroupsRequest], + Awaitable[mirroring.ListMirroringEndpointGroupsResponse], + ]: + r"""Return a callable for the list mirroring endpoint groups method over gRPC. + + Lists endpoint groups in a given project and + location. See https://google.aip.dev/132. + + Returns: + Callable[[~.ListMirroringEndpointGroupsRequest], + Awaitable[~.ListMirroringEndpointGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
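+        # Editorial sketch (not generated code): on this AsyncIO transport every
+        # stub returns an awaitable, so calls must run inside an event loop; the
+        # ``parent`` field is assumed from AIP-132 and is not defined here:
+        #
+        #     async def list_groups(transport, parent):
+        #         request = mirroring.ListMirroringEndpointGroupsRequest(parent=parent)
+        #         return await transport.list_mirroring_endpoint_groups(request)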
+ if "list_mirroring_endpoint_groups" not in self._stubs: + self._stubs[ + "list_mirroring_endpoint_groups" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/ListMirroringEndpointGroups", + request_serializer=mirroring.ListMirroringEndpointGroupsRequest.serialize, + response_deserializer=mirroring.ListMirroringEndpointGroupsResponse.deserialize, + ) + return self._stubs["list_mirroring_endpoint_groups"] + + @property + def get_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.GetMirroringEndpointGroupRequest], + Awaitable[mirroring.MirroringEndpointGroup], + ]: + r"""Return a callable for the get mirroring endpoint group method over gRPC. + + Gets a specific endpoint group. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetMirroringEndpointGroupRequest], + Awaitable[~.MirroringEndpointGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_mirroring_endpoint_group" not in self._stubs: + self._stubs[ + "get_mirroring_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/GetMirroringEndpointGroup", + request_serializer=mirroring.GetMirroringEndpointGroupRequest.serialize, + response_deserializer=mirroring.MirroringEndpointGroup.deserialize, + ) + return self._stubs["get_mirroring_endpoint_group"] + + @property + def create_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.CreateMirroringEndpointGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create mirroring endpoint + group method over gRPC. + + Creates an endpoint group in a given project and + location. See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateMirroringEndpointGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_mirroring_endpoint_group" not in self._stubs: + self._stubs[ + "create_mirroring_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/CreateMirroringEndpointGroup", + request_serializer=mirroring.CreateMirroringEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_mirroring_endpoint_group"] + + @property + def update_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.UpdateMirroringEndpointGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update mirroring endpoint + group method over gRPC. + + Updates an endpoint group. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateMirroringEndpointGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
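+        # Editorial sketch (not generated code): update RPCs follow AIP-134, so a
+        # request typically pairs the modified resource with a FieldMask. The
+        # ``mirroring_endpoint_group`` and ``update_mask`` field names are
+        # assumed, not taken from this module:
+        #
+        #     from google.protobuf import field_mask_pb2
+        #
+        #     request = mirroring.UpdateMirroringEndpointGroupRequest(
+        #         mirroring_endpoint_group=updated_group,
+        #         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+        #     )
+        #     operation = await transport.update_mirroring_endpoint_group(request)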
+ if "update_mirroring_endpoint_group" not in self._stubs: + self._stubs[ + "update_mirroring_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/UpdateMirroringEndpointGroup", + request_serializer=mirroring.UpdateMirroringEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_mirroring_endpoint_group"] + + @property + def delete_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.DeleteMirroringEndpointGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete mirroring endpoint + group method over gRPC. + + Deletes an endpoint group. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteMirroringEndpointGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_mirroring_endpoint_group" not in self._stubs: + self._stubs[ + "delete_mirroring_endpoint_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/DeleteMirroringEndpointGroup", + request_serializer=mirroring.DeleteMirroringEndpointGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_mirroring_endpoint_group"] + + @property + def list_mirroring_endpoint_group_associations( + self, + ) -> Callable[ + [mirroring.ListMirroringEndpointGroupAssociationsRequest], + Awaitable[mirroring.ListMirroringEndpointGroupAssociationsResponse], + ]: + r"""Return a callable for the list mirroring endpoint group + associations method over gRPC. + + Lists associations in a given project and location. + See https://google.aip.dev/132. + + Returns: + Callable[[~.ListMirroringEndpointGroupAssociationsRequest], + Awaitable[~.ListMirroringEndpointGroupAssociationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_mirroring_endpoint_group_associations" not in self._stubs: + self._stubs[ + "list_mirroring_endpoint_group_associations" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/ListMirroringEndpointGroupAssociations", + request_serializer=mirroring.ListMirroringEndpointGroupAssociationsRequest.serialize, + response_deserializer=mirroring.ListMirroringEndpointGroupAssociationsResponse.deserialize, + ) + return self._stubs["list_mirroring_endpoint_group_associations"] + + @property + def get_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.GetMirroringEndpointGroupAssociationRequest], + Awaitable[mirroring.MirroringEndpointGroupAssociation], + ]: + r"""Return a callable for the get mirroring endpoint group + association method over gRPC. + + Gets a specific association. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetMirroringEndpointGroupAssociationRequest], + Awaitable[~.MirroringEndpointGroupAssociation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_mirroring_endpoint_group_association" not in self._stubs: + self._stubs[ + "get_mirroring_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/GetMirroringEndpointGroupAssociation", + request_serializer=mirroring.GetMirroringEndpointGroupAssociationRequest.serialize, + response_deserializer=mirroring.MirroringEndpointGroupAssociation.deserialize, + ) + return self._stubs["get_mirroring_endpoint_group_association"] + + @property + def create_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.CreateMirroringEndpointGroupAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create mirroring endpoint + group association method over gRPC. + + Creates an association in a given project and + location. See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateMirroringEndpointGroupAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_mirroring_endpoint_group_association" not in self._stubs: + self._stubs[ + "create_mirroring_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/CreateMirroringEndpointGroupAssociation", + request_serializer=mirroring.CreateMirroringEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_mirroring_endpoint_group_association"] + + @property + def update_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.UpdateMirroringEndpointGroupAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update mirroring endpoint + group association method over gRPC. + + Updates an association. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateMirroringEndpointGroupAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_mirroring_endpoint_group_association" not in self._stubs: + self._stubs[ + "update_mirroring_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/UpdateMirroringEndpointGroupAssociation", + request_serializer=mirroring.UpdateMirroringEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_mirroring_endpoint_group_association"] + + @property + def delete_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.DeleteMirroringEndpointGroupAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete mirroring endpoint + group association method over gRPC. + + Deletes an association. + See https://google.aip.dev/135. 
+ + Returns: + Callable[[~.DeleteMirroringEndpointGroupAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_mirroring_endpoint_group_association" not in self._stubs: + self._stubs[ + "delete_mirroring_endpoint_group_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/DeleteMirroringEndpointGroupAssociation", + request_serializer=mirroring.DeleteMirroringEndpointGroupAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_mirroring_endpoint_group_association"] + + @property + def list_mirroring_deployment_groups( + self, + ) -> Callable[ + [mirroring.ListMirroringDeploymentGroupsRequest], + Awaitable[mirroring.ListMirroringDeploymentGroupsResponse], + ]: + r"""Return a callable for the list mirroring deployment + groups method over gRPC. + + Lists deployment groups in a given project and + location. See https://google.aip.dev/132. + + Returns: + Callable[[~.ListMirroringDeploymentGroupsRequest], + Awaitable[~.ListMirroringDeploymentGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_mirroring_deployment_groups" not in self._stubs: + self._stubs[ + "list_mirroring_deployment_groups" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/ListMirroringDeploymentGroups", + request_serializer=mirroring.ListMirroringDeploymentGroupsRequest.serialize, + response_deserializer=mirroring.ListMirroringDeploymentGroupsResponse.deserialize, + ) + return self._stubs["list_mirroring_deployment_groups"] + + @property + def get_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.GetMirroringDeploymentGroupRequest], + Awaitable[mirroring.MirroringDeploymentGroup], + ]: + r"""Return a callable for the get mirroring deployment group method over gRPC. + + Gets a specific deployment group. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetMirroringDeploymentGroupRequest], + Awaitable[~.MirroringDeploymentGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_mirroring_deployment_group" not in self._stubs: + self._stubs[ + "get_mirroring_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/GetMirroringDeploymentGroup", + request_serializer=mirroring.GetMirroringDeploymentGroupRequest.serialize, + response_deserializer=mirroring.MirroringDeploymentGroup.deserialize, + ) + return self._stubs["get_mirroring_deployment_group"] + + @property + def create_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.CreateMirroringDeploymentGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create mirroring deployment + group method over gRPC. 
+ + Creates a deployment group in a given project and + location. See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateMirroringDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_mirroring_deployment_group" not in self._stubs: + self._stubs[ + "create_mirroring_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/CreateMirroringDeploymentGroup", + request_serializer=mirroring.CreateMirroringDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_mirroring_deployment_group"] + + @property + def update_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.UpdateMirroringDeploymentGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update mirroring deployment + group method over gRPC. + + Updates a deployment group. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateMirroringDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_mirroring_deployment_group" not in self._stubs: + self._stubs[ + "update_mirroring_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/UpdateMirroringDeploymentGroup", + request_serializer=mirroring.UpdateMirroringDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_mirroring_deployment_group"] + + @property + def delete_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.DeleteMirroringDeploymentGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete mirroring deployment + group method over gRPC. + + Deletes a deployment group. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteMirroringDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_mirroring_deployment_group" not in self._stubs: + self._stubs[ + "delete_mirroring_deployment_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/DeleteMirroringDeploymentGroup", + request_serializer=mirroring.DeleteMirroringDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_mirroring_deployment_group"] + + @property + def list_mirroring_deployments( + self, + ) -> Callable[ + [mirroring.ListMirroringDeploymentsRequest], + Awaitable[mirroring.ListMirroringDeploymentsResponse], + ]: + r"""Return a callable for the list mirroring deployments method over gRPC. + + Lists deployments in a given project and location. + See https://google.aip.dev/132. 
+ + Returns: + Callable[[~.ListMirroringDeploymentsRequest], + Awaitable[~.ListMirroringDeploymentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_mirroring_deployments" not in self._stubs: + self._stubs[ + "list_mirroring_deployments" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/ListMirroringDeployments", + request_serializer=mirroring.ListMirroringDeploymentsRequest.serialize, + response_deserializer=mirroring.ListMirroringDeploymentsResponse.deserialize, + ) + return self._stubs["list_mirroring_deployments"] + + @property + def get_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.GetMirroringDeploymentRequest], + Awaitable[mirroring.MirroringDeployment], + ]: + r"""Return a callable for the get mirroring deployment method over gRPC. + + Gets a specific deployment. + See https://google.aip.dev/131. + + Returns: + Callable[[~.GetMirroringDeploymentRequest], + Awaitable[~.MirroringDeployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_mirroring_deployment" not in self._stubs: + self._stubs["get_mirroring_deployment"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/GetMirroringDeployment", + request_serializer=mirroring.GetMirroringDeploymentRequest.serialize, + response_deserializer=mirroring.MirroringDeployment.deserialize, + ) + return self._stubs["get_mirroring_deployment"] + + @property + def create_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.CreateMirroringDeploymentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create mirroring deployment method over gRPC. + + Creates a deployment in a given project and location. + See https://google.aip.dev/133. + + Returns: + Callable[[~.CreateMirroringDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_mirroring_deployment" not in self._stubs: + self._stubs[ + "create_mirroring_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/CreateMirroringDeployment", + request_serializer=mirroring.CreateMirroringDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_mirroring_deployment"] + + @property + def update_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.UpdateMirroringDeploymentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update mirroring deployment method over gRPC. + + Updates a deployment. + See https://google.aip.dev/134. + + Returns: + Callable[[~.UpdateMirroringDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
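+
+        Example (editorial sketch, not part of the generated surface; request
+        construction is omitted, and the polling call assumes the standard
+        ``google.api_core`` ``OperationsAsyncClient`` interface)::
+
+            operation = await transport.update_mirroring_deployment(request)
+            # The raw Operation can then be re-fetched through the transport's
+            # operations client until ``operation.done`` is set.
+            operation = await transport.operations_client.get_operation(
+                name=operation.name
+            )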
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_mirroring_deployment" not in self._stubs: + self._stubs[ + "update_mirroring_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/UpdateMirroringDeployment", + request_serializer=mirroring.UpdateMirroringDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_mirroring_deployment"] + + @property + def delete_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.DeleteMirroringDeploymentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete mirroring deployment method over gRPC. + + Deletes a deployment. + See https://google.aip.dev/135. + + Returns: + Callable[[~.DeleteMirroringDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_mirroring_deployment" not in self._stubs: + self._stubs[ + "delete_mirroring_deployment" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.Mirroring/DeleteMirroringDeployment", + request_serializer=mirroring.DeleteMirroringDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_mirroring_deployment"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_mirroring_endpoint_groups: self._wrap_method( + self.list_mirroring_endpoint_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_mirroring_endpoint_group: self._wrap_method( + self.get_mirroring_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.create_mirroring_endpoint_group: self._wrap_method( + self.create_mirroring_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.update_mirroring_endpoint_group: self._wrap_method( + self.update_mirroring_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_mirroring_endpoint_group: self._wrap_method( + self.delete_mirroring_endpoint_group, + default_timeout=None, + client_info=client_info, + ), + self.list_mirroring_endpoint_group_associations: self._wrap_method( + self.list_mirroring_endpoint_group_associations, + default_timeout=None, + client_info=client_info, + ), + self.get_mirroring_endpoint_group_association: self._wrap_method( + self.get_mirroring_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.create_mirroring_endpoint_group_association: self._wrap_method( + self.create_mirroring_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.update_mirroring_endpoint_group_association: self._wrap_method( + self.update_mirroring_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + self.delete_mirroring_endpoint_group_association: self._wrap_method( + self.delete_mirroring_endpoint_group_association, + default_timeout=None, + client_info=client_info, + ), + 
self.list_mirroring_deployment_groups: self._wrap_method( + self.list_mirroring_deployment_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_mirroring_deployment_group: self._wrap_method( + self.get_mirroring_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.create_mirroring_deployment_group: self._wrap_method( + self.create_mirroring_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.update_mirroring_deployment_group: self._wrap_method( + self.update_mirroring_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_mirroring_deployment_group: self._wrap_method( + self.delete_mirroring_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.list_mirroring_deployments: self._wrap_method( + self.list_mirroring_deployments, + default_timeout=None, + client_info=client_info, + ), + self.get_mirroring_deployment: self._wrap_method( + self.get_mirroring_deployment, + default_timeout=None, + client_info=client_info, + ), + self.create_mirroring_deployment: self._wrap_method( + self.create_mirroring_deployment, + default_timeout=None, + client_info=client_info, + ), + self.update_mirroring_deployment: self._wrap_method( + self.update_mirroring_deployment, + default_timeout=None, + client_info=client_info, + ), + self.delete_mirroring_deployment: self._wrap_method( + self.delete_mirroring_deployment, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
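+ # Note: every stub property on this transport uses the same lazy-caching
+ # pattern shown here: the first access creates a unary-unary callable on the
+ # logged channel, stores it in self._stubs, and later accesses reuse it.
+ # Illustrative sketch only (the credentials object and operation name are
+ # assumed placeholders); the returned callable can be awaited directly,
+ # although normal code goes through the generated Mirroring client surface
+ # rather than the raw transport:
+ #
+ #     transport = MirroringGrpcAsyncIOTransport(credentials=credentials)
+ #     await transport.delete_operation(
+ #         operations_pb2.DeleteOperationRequest(name="operations/some-op")
+ #     )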
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
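+ # Illustrative sketch only (the resource name and permission string below
+ # are assumed placeholders, not values taken from the Mirroring API):
+ #
+ #     response = await transport.test_iam_permissions(
+ #         iam_policy_pb2.TestIamPermissionsRequest(
+ #             resource="projects/p/locations/global/mirroringDeploymentGroups/g",
+ #             permissions=["networksecurity.mirroringDeploymentGroups.get"],
+ #         )
+ #     )
+ #     granted = set(response.permissions)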
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("MirroringGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/rest.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/rest.py new file mode 100644 index 000000000000..ae1b803298e7 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/rest.py @@ -0,0 +1,6272 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.network_security_v1alpha1.types import mirroring + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseMirroringRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class MirroringRestInterceptor: + """Interceptor for Mirroring. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MirroringRestTransport. + + .. code-block:: python + class MyCustomMirroringInterceptor(MirroringRestInterceptor): + def pre_create_mirroring_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_mirroring_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_mirroring_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_mirroring_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_mirroring_endpoint_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_mirroring_endpoint_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_mirroring_endpoint_group_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_mirroring_endpoint_group_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_mirroring_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_mirroring_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_mirroring_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_mirroring_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_mirroring_endpoint_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_mirroring_endpoint_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_mirroring_endpoint_group_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_mirroring_endpoint_group_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_mirroring_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_mirroring_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_mirroring_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_mirroring_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_mirroring_endpoint_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_mirroring_endpoint_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_mirroring_endpoint_group_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_get_mirroring_endpoint_group_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_mirroring_deployment_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_mirroring_deployment_groups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_mirroring_deployments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_mirroring_deployments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_mirroring_endpoint_group_associations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_mirroring_endpoint_group_associations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_mirroring_endpoint_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_mirroring_endpoint_groups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_mirroring_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_mirroring_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_mirroring_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_mirroring_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_mirroring_endpoint_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_mirroring_endpoint_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_mirroring_endpoint_group_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_mirroring_endpoint_group_association(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MirroringRestTransport(interceptor=MyCustomMirroringInterceptor()) + client = MirroringClient(transport=transport) + + + """ + + def pre_create_mirroring_deployment( + self, + request: mirroring.CreateMirroringDeploymentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.CreateMirroringDeploymentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_mirroring_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_create_mirroring_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_mirroring_deployment + + DEPRECATED. Please use the `post_create_mirroring_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_create_mirroring_deployment` interceptor runs + before the `post_create_mirroring_deployment_with_metadata` interceptor. 
+ """ + return response + + def post_create_mirroring_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_mirroring_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_create_mirroring_deployment_with_metadata` + interceptor in new development instead of the `post_create_mirroring_deployment` interceptor. + When both interceptors are used, this `post_create_mirroring_deployment_with_metadata` interceptor runs after the + `post_create_mirroring_deployment` interceptor. The (possibly modified) response returned by + `post_create_mirroring_deployment` will be passed to + `post_create_mirroring_deployment_with_metadata`. + """ + return response, metadata + + def pre_create_mirroring_deployment_group( + self, + request: mirroring.CreateMirroringDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.CreateMirroringDeploymentGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_mirroring_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_create_mirroring_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_mirroring_deployment_group + + DEPRECATED. Please use the `post_create_mirroring_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_create_mirroring_deployment_group` interceptor runs + before the `post_create_mirroring_deployment_group_with_metadata` interceptor. + """ + return response + + def post_create_mirroring_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_mirroring_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_create_mirroring_deployment_group_with_metadata` + interceptor in new development instead of the `post_create_mirroring_deployment_group` interceptor. + When both interceptors are used, this `post_create_mirroring_deployment_group_with_metadata` interceptor runs after the + `post_create_mirroring_deployment_group` interceptor. The (possibly modified) response returned by + `post_create_mirroring_deployment_group` will be passed to + `post_create_mirroring_deployment_group_with_metadata`. 
+ """ + return response, metadata + + def pre_create_mirroring_endpoint_group( + self, + request: mirroring.CreateMirroringEndpointGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.CreateMirroringEndpointGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_mirroring_endpoint_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_create_mirroring_endpoint_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_mirroring_endpoint_group + + DEPRECATED. Please use the `post_create_mirroring_endpoint_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_create_mirroring_endpoint_group` interceptor runs + before the `post_create_mirroring_endpoint_group_with_metadata` interceptor. + """ + return response + + def post_create_mirroring_endpoint_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_mirroring_endpoint_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_create_mirroring_endpoint_group_with_metadata` + interceptor in new development instead of the `post_create_mirroring_endpoint_group` interceptor. + When both interceptors are used, this `post_create_mirroring_endpoint_group_with_metadata` interceptor runs after the + `post_create_mirroring_endpoint_group` interceptor. The (possibly modified) response returned by + `post_create_mirroring_endpoint_group` will be passed to + `post_create_mirroring_endpoint_group_with_metadata`. + """ + return response, metadata + + def pre_create_mirroring_endpoint_group_association( + self, + request: mirroring.CreateMirroringEndpointGroupAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.CreateMirroringEndpointGroupAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_mirroring_endpoint_group_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_create_mirroring_endpoint_group_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_mirroring_endpoint_group_association + + DEPRECATED. Please use the `post_create_mirroring_endpoint_group_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_create_mirroring_endpoint_group_association` interceptor runs + before the `post_create_mirroring_endpoint_group_association_with_metadata` interceptor. 
+ """ + return response + + def post_create_mirroring_endpoint_group_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_mirroring_endpoint_group_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_create_mirroring_endpoint_group_association_with_metadata` + interceptor in new development instead of the `post_create_mirroring_endpoint_group_association` interceptor. + When both interceptors are used, this `post_create_mirroring_endpoint_group_association_with_metadata` interceptor runs after the + `post_create_mirroring_endpoint_group_association` interceptor. The (possibly modified) response returned by + `post_create_mirroring_endpoint_group_association` will be passed to + `post_create_mirroring_endpoint_group_association_with_metadata`. + """ + return response, metadata + + def pre_delete_mirroring_deployment( + self, + request: mirroring.DeleteMirroringDeploymentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.DeleteMirroringDeploymentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_mirroring_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_delete_mirroring_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_mirroring_deployment + + DEPRECATED. Please use the `post_delete_mirroring_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_delete_mirroring_deployment` interceptor runs + before the `post_delete_mirroring_deployment_with_metadata` interceptor. + """ + return response + + def post_delete_mirroring_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_mirroring_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_delete_mirroring_deployment_with_metadata` + interceptor in new development instead of the `post_delete_mirroring_deployment` interceptor. + When both interceptors are used, this `post_delete_mirroring_deployment_with_metadata` interceptor runs after the + `post_delete_mirroring_deployment` interceptor. The (possibly modified) response returned by + `post_delete_mirroring_deployment` will be passed to + `post_delete_mirroring_deployment_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_mirroring_deployment_group( + self, + request: mirroring.DeleteMirroringDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.DeleteMirroringDeploymentGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_mirroring_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_delete_mirroring_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_mirroring_deployment_group + + DEPRECATED. Please use the `post_delete_mirroring_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_delete_mirroring_deployment_group` interceptor runs + before the `post_delete_mirroring_deployment_group_with_metadata` interceptor. + """ + return response + + def post_delete_mirroring_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_mirroring_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_delete_mirroring_deployment_group_with_metadata` + interceptor in new development instead of the `post_delete_mirroring_deployment_group` interceptor. + When both interceptors are used, this `post_delete_mirroring_deployment_group_with_metadata` interceptor runs after the + `post_delete_mirroring_deployment_group` interceptor. The (possibly modified) response returned by + `post_delete_mirroring_deployment_group` will be passed to + `post_delete_mirroring_deployment_group_with_metadata`. + """ + return response, metadata + + def pre_delete_mirroring_endpoint_group( + self, + request: mirroring.DeleteMirroringEndpointGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.DeleteMirroringEndpointGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_mirroring_endpoint_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_delete_mirroring_endpoint_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_mirroring_endpoint_group + + DEPRECATED. Please use the `post_delete_mirroring_endpoint_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_delete_mirroring_endpoint_group` interceptor runs + before the `post_delete_mirroring_endpoint_group_with_metadata` interceptor. 
+ """ + return response + + def post_delete_mirroring_endpoint_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_mirroring_endpoint_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_delete_mirroring_endpoint_group_with_metadata` + interceptor in new development instead of the `post_delete_mirroring_endpoint_group` interceptor. + When both interceptors are used, this `post_delete_mirroring_endpoint_group_with_metadata` interceptor runs after the + `post_delete_mirroring_endpoint_group` interceptor. The (possibly modified) response returned by + `post_delete_mirroring_endpoint_group` will be passed to + `post_delete_mirroring_endpoint_group_with_metadata`. + """ + return response, metadata + + def pre_delete_mirroring_endpoint_group_association( + self, + request: mirroring.DeleteMirroringEndpointGroupAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.DeleteMirroringEndpointGroupAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_mirroring_endpoint_group_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_delete_mirroring_endpoint_group_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_mirroring_endpoint_group_association + + DEPRECATED. Please use the `post_delete_mirroring_endpoint_group_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_delete_mirroring_endpoint_group_association` interceptor runs + before the `post_delete_mirroring_endpoint_group_association_with_metadata` interceptor. + """ + return response + + def post_delete_mirroring_endpoint_group_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_mirroring_endpoint_group_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_delete_mirroring_endpoint_group_association_with_metadata` + interceptor in new development instead of the `post_delete_mirroring_endpoint_group_association` interceptor. + When both interceptors are used, this `post_delete_mirroring_endpoint_group_association_with_metadata` interceptor runs after the + `post_delete_mirroring_endpoint_group_association` interceptor. The (possibly modified) response returned by + `post_delete_mirroring_endpoint_group_association` will be passed to + `post_delete_mirroring_endpoint_group_association_with_metadata`. 
+ """ + return response, metadata + + def pre_get_mirroring_deployment( + self, + request: mirroring.GetMirroringDeploymentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.GetMirroringDeploymentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_mirroring_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_get_mirroring_deployment( + self, response: mirroring.MirroringDeployment + ) -> mirroring.MirroringDeployment: + """Post-rpc interceptor for get_mirroring_deployment + + DEPRECATED. Please use the `post_get_mirroring_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_get_mirroring_deployment` interceptor runs + before the `post_get_mirroring_deployment_with_metadata` interceptor. + """ + return response + + def post_get_mirroring_deployment_with_metadata( + self, + response: mirroring.MirroringDeployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[mirroring.MirroringDeployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_mirroring_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_get_mirroring_deployment_with_metadata` + interceptor in new development instead of the `post_get_mirroring_deployment` interceptor. + When both interceptors are used, this `post_get_mirroring_deployment_with_metadata` interceptor runs after the + `post_get_mirroring_deployment` interceptor. The (possibly modified) response returned by + `post_get_mirroring_deployment` will be passed to + `post_get_mirroring_deployment_with_metadata`. + """ + return response, metadata + + def pre_get_mirroring_deployment_group( + self, + request: mirroring.GetMirroringDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.GetMirroringDeploymentGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_mirroring_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_get_mirroring_deployment_group( + self, response: mirroring.MirroringDeploymentGroup + ) -> mirroring.MirroringDeploymentGroup: + """Post-rpc interceptor for get_mirroring_deployment_group + + DEPRECATED. Please use the `post_get_mirroring_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_get_mirroring_deployment_group` interceptor runs + before the `post_get_mirroring_deployment_group_with_metadata` interceptor. 
+ """ + return response + + def post_get_mirroring_deployment_group_with_metadata( + self, + response: mirroring.MirroringDeploymentGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.MirroringDeploymentGroup, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_mirroring_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_get_mirroring_deployment_group_with_metadata` + interceptor in new development instead of the `post_get_mirroring_deployment_group` interceptor. + When both interceptors are used, this `post_get_mirroring_deployment_group_with_metadata` interceptor runs after the + `post_get_mirroring_deployment_group` interceptor. The (possibly modified) response returned by + `post_get_mirroring_deployment_group` will be passed to + `post_get_mirroring_deployment_group_with_metadata`. + """ + return response, metadata + + def pre_get_mirroring_endpoint_group( + self, + request: mirroring.GetMirroringEndpointGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.GetMirroringEndpointGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_mirroring_endpoint_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_get_mirroring_endpoint_group( + self, response: mirroring.MirroringEndpointGroup + ) -> mirroring.MirroringEndpointGroup: + """Post-rpc interceptor for get_mirroring_endpoint_group + + DEPRECATED. Please use the `post_get_mirroring_endpoint_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_get_mirroring_endpoint_group` interceptor runs + before the `post_get_mirroring_endpoint_group_with_metadata` interceptor. + """ + return response + + def post_get_mirroring_endpoint_group_with_metadata( + self, + response: mirroring.MirroringEndpointGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.MirroringEndpointGroup, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_mirroring_endpoint_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_get_mirroring_endpoint_group_with_metadata` + interceptor in new development instead of the `post_get_mirroring_endpoint_group` interceptor. + When both interceptors are used, this `post_get_mirroring_endpoint_group_with_metadata` interceptor runs after the + `post_get_mirroring_endpoint_group` interceptor. The (possibly modified) response returned by + `post_get_mirroring_endpoint_group` will be passed to + `post_get_mirroring_endpoint_group_with_metadata`. 
+ """ + return response, metadata + + def pre_get_mirroring_endpoint_group_association( + self, + request: mirroring.GetMirroringEndpointGroupAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.GetMirroringEndpointGroupAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_mirroring_endpoint_group_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_get_mirroring_endpoint_group_association( + self, response: mirroring.MirroringEndpointGroupAssociation + ) -> mirroring.MirroringEndpointGroupAssociation: + """Post-rpc interceptor for get_mirroring_endpoint_group_association + + DEPRECATED. Please use the `post_get_mirroring_endpoint_group_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_get_mirroring_endpoint_group_association` interceptor runs + before the `post_get_mirroring_endpoint_group_association_with_metadata` interceptor. + """ + return response + + def post_get_mirroring_endpoint_group_association_with_metadata( + self, + response: mirroring.MirroringEndpointGroupAssociation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.MirroringEndpointGroupAssociation, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_mirroring_endpoint_group_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_get_mirroring_endpoint_group_association_with_metadata` + interceptor in new development instead of the `post_get_mirroring_endpoint_group_association` interceptor. + When both interceptors are used, this `post_get_mirroring_endpoint_group_association_with_metadata` interceptor runs after the + `post_get_mirroring_endpoint_group_association` interceptor. The (possibly modified) response returned by + `post_get_mirroring_endpoint_group_association` will be passed to + `post_get_mirroring_endpoint_group_association_with_metadata`. + """ + return response, metadata + + def pre_list_mirroring_deployment_groups( + self, + request: mirroring.ListMirroringDeploymentGroupsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.ListMirroringDeploymentGroupsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_mirroring_deployment_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_list_mirroring_deployment_groups( + self, response: mirroring.ListMirroringDeploymentGroupsResponse + ) -> mirroring.ListMirroringDeploymentGroupsResponse: + """Post-rpc interceptor for list_mirroring_deployment_groups + + DEPRECATED. Please use the `post_list_mirroring_deployment_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_list_mirroring_deployment_groups` interceptor runs + before the `post_list_mirroring_deployment_groups_with_metadata` interceptor. 
+ """ + return response + + def post_list_mirroring_deployment_groups_with_metadata( + self, + response: mirroring.ListMirroringDeploymentGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.ListMirroringDeploymentGroupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_mirroring_deployment_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_list_mirroring_deployment_groups_with_metadata` + interceptor in new development instead of the `post_list_mirroring_deployment_groups` interceptor. + When both interceptors are used, this `post_list_mirroring_deployment_groups_with_metadata` interceptor runs after the + `post_list_mirroring_deployment_groups` interceptor. The (possibly modified) response returned by + `post_list_mirroring_deployment_groups` will be passed to + `post_list_mirroring_deployment_groups_with_metadata`. + """ + return response, metadata + + def pre_list_mirroring_deployments( + self, + request: mirroring.ListMirroringDeploymentsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.ListMirroringDeploymentsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_mirroring_deployments + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_list_mirroring_deployments( + self, response: mirroring.ListMirroringDeploymentsResponse + ) -> mirroring.ListMirroringDeploymentsResponse: + """Post-rpc interceptor for list_mirroring_deployments + + DEPRECATED. Please use the `post_list_mirroring_deployments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_list_mirroring_deployments` interceptor runs + before the `post_list_mirroring_deployments_with_metadata` interceptor. + """ + return response + + def post_list_mirroring_deployments_with_metadata( + self, + response: mirroring.ListMirroringDeploymentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.ListMirroringDeploymentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_mirroring_deployments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_list_mirroring_deployments_with_metadata` + interceptor in new development instead of the `post_list_mirroring_deployments` interceptor. + When both interceptors are used, this `post_list_mirroring_deployments_with_metadata` interceptor runs after the + `post_list_mirroring_deployments` interceptor. The (possibly modified) response returned by + `post_list_mirroring_deployments` will be passed to + `post_list_mirroring_deployments_with_metadata`. 
+ """ + return response, metadata + + def pre_list_mirroring_endpoint_group_associations( + self, + request: mirroring.ListMirroringEndpointGroupAssociationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.ListMirroringEndpointGroupAssociationsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_mirroring_endpoint_group_associations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_list_mirroring_endpoint_group_associations( + self, response: mirroring.ListMirroringEndpointGroupAssociationsResponse + ) -> mirroring.ListMirroringEndpointGroupAssociationsResponse: + """Post-rpc interceptor for list_mirroring_endpoint_group_associations + + DEPRECATED. Please use the `post_list_mirroring_endpoint_group_associations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_list_mirroring_endpoint_group_associations` interceptor runs + before the `post_list_mirroring_endpoint_group_associations_with_metadata` interceptor. + """ + return response + + def post_list_mirroring_endpoint_group_associations_with_metadata( + self, + response: mirroring.ListMirroringEndpointGroupAssociationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.ListMirroringEndpointGroupAssociationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_mirroring_endpoint_group_associations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_list_mirroring_endpoint_group_associations_with_metadata` + interceptor in new development instead of the `post_list_mirroring_endpoint_group_associations` interceptor. + When both interceptors are used, this `post_list_mirroring_endpoint_group_associations_with_metadata` interceptor runs after the + `post_list_mirroring_endpoint_group_associations` interceptor. The (possibly modified) response returned by + `post_list_mirroring_endpoint_group_associations` will be passed to + `post_list_mirroring_endpoint_group_associations_with_metadata`. + """ + return response, metadata + + def pre_list_mirroring_endpoint_groups( + self, + request: mirroring.ListMirroringEndpointGroupsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.ListMirroringEndpointGroupsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_mirroring_endpoint_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_list_mirroring_endpoint_groups( + self, response: mirroring.ListMirroringEndpointGroupsResponse + ) -> mirroring.ListMirroringEndpointGroupsResponse: + """Post-rpc interceptor for list_mirroring_endpoint_groups + + DEPRECATED. Please use the `post_list_mirroring_endpoint_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. 
This `post_list_mirroring_endpoint_groups` interceptor runs + before the `post_list_mirroring_endpoint_groups_with_metadata` interceptor. + """ + return response + + def post_list_mirroring_endpoint_groups_with_metadata( + self, + response: mirroring.ListMirroringEndpointGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.ListMirroringEndpointGroupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_mirroring_endpoint_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_list_mirroring_endpoint_groups_with_metadata` + interceptor in new development instead of the `post_list_mirroring_endpoint_groups` interceptor. + When both interceptors are used, this `post_list_mirroring_endpoint_groups_with_metadata` interceptor runs after the + `post_list_mirroring_endpoint_groups` interceptor. The (possibly modified) response returned by + `post_list_mirroring_endpoint_groups` will be passed to + `post_list_mirroring_endpoint_groups_with_metadata`. + """ + return response, metadata + + def pre_update_mirroring_deployment( + self, + request: mirroring.UpdateMirroringDeploymentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.UpdateMirroringDeploymentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_mirroring_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_update_mirroring_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_mirroring_deployment + + DEPRECATED. Please use the `post_update_mirroring_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_update_mirroring_deployment` interceptor runs + before the `post_update_mirroring_deployment_with_metadata` interceptor. + """ + return response + + def post_update_mirroring_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_mirroring_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_update_mirroring_deployment_with_metadata` + interceptor in new development instead of the `post_update_mirroring_deployment` interceptor. + When both interceptors are used, this `post_update_mirroring_deployment_with_metadata` interceptor runs after the + `post_update_mirroring_deployment` interceptor. The (possibly modified) response returned by + `post_update_mirroring_deployment` will be passed to + `post_update_mirroring_deployment_with_metadata`. 
+ """ + return response, metadata + + def pre_update_mirroring_deployment_group( + self, + request: mirroring.UpdateMirroringDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.UpdateMirroringDeploymentGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_mirroring_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_update_mirroring_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_mirroring_deployment_group + + DEPRECATED. Please use the `post_update_mirroring_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_update_mirroring_deployment_group` interceptor runs + before the `post_update_mirroring_deployment_group_with_metadata` interceptor. + """ + return response + + def post_update_mirroring_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_mirroring_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_update_mirroring_deployment_group_with_metadata` + interceptor in new development instead of the `post_update_mirroring_deployment_group` interceptor. + When both interceptors are used, this `post_update_mirroring_deployment_group_with_metadata` interceptor runs after the + `post_update_mirroring_deployment_group` interceptor. The (possibly modified) response returned by + `post_update_mirroring_deployment_group` will be passed to + `post_update_mirroring_deployment_group_with_metadata`. + """ + return response, metadata + + def pre_update_mirroring_endpoint_group( + self, + request: mirroring.UpdateMirroringEndpointGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.UpdateMirroringEndpointGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_mirroring_endpoint_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_update_mirroring_endpoint_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_mirroring_endpoint_group + + DEPRECATED. Please use the `post_update_mirroring_endpoint_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_update_mirroring_endpoint_group` interceptor runs + before the `post_update_mirroring_endpoint_group_with_metadata` interceptor. 
+ """ + return response + + def post_update_mirroring_endpoint_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_mirroring_endpoint_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_update_mirroring_endpoint_group_with_metadata` + interceptor in new development instead of the `post_update_mirroring_endpoint_group` interceptor. + When both interceptors are used, this `post_update_mirroring_endpoint_group_with_metadata` interceptor runs after the + `post_update_mirroring_endpoint_group` interceptor. The (possibly modified) response returned by + `post_update_mirroring_endpoint_group` will be passed to + `post_update_mirroring_endpoint_group_with_metadata`. + """ + return response, metadata + + def pre_update_mirroring_endpoint_group_association( + self, + request: mirroring.UpdateMirroringEndpointGroupAssociationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + mirroring.UpdateMirroringEndpointGroupAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_mirroring_endpoint_group_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_update_mirroring_endpoint_group_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_mirroring_endpoint_group_association + + DEPRECATED. Please use the `post_update_mirroring_endpoint_group_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. This `post_update_mirroring_endpoint_group_association` interceptor runs + before the `post_update_mirroring_endpoint_group_association_with_metadata` interceptor. + """ + return response + + def post_update_mirroring_endpoint_group_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_mirroring_endpoint_group_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Mirroring server but before it is returned to user code. + + We recommend only using this `post_update_mirroring_endpoint_group_association_with_metadata` + interceptor in new development instead of the `post_update_mirroring_endpoint_group_association` interceptor. + When both interceptors are used, this `post_update_mirroring_endpoint_group_association_with_metadata` interceptor runs after the + `post_update_mirroring_endpoint_group_association` interceptor. The (possibly modified) response returned by + `post_update_mirroring_endpoint_group_association` will be passed to + `post_update_mirroring_endpoint_group_association_with_metadata`. 
+ """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. 
+ """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Mirroring server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Mirroring server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class MirroringRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MirroringRestInterceptor + + +class MirroringRestTransport(_BaseMirroringRestTransport): + """REST backend synchronous transport for Mirroring. + + PM2 is the "out-of-band" flavor of the Network Security + Integrations product. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MirroringRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MirroringRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateMirroringDeployment( + _BaseMirroringRestTransport._BaseCreateMirroringDeployment, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.CreateMirroringDeployment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: mirroring.CreateMirroringDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create mirroring + deployment method over HTTP. + + Args: + request (~.mirroring.CreateMirroringDeploymentRequest): + The request object. Request message for + CreateMirroringDeployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseMirroringRestTransport._BaseCreateMirroringDeployment._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_mirroring_deployment( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseCreateMirroringDeployment._get_transcoded_request( + http_options, request + ) + + body = _BaseMirroringRestTransport._BaseCreateMirroringDeployment._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseCreateMirroringDeployment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.CreateMirroringDeployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "CreateMirroringDeployment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._CreateMirroringDeployment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_mirroring_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_mirroring_deployment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.create_mirroring_deployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "CreateMirroringDeployment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateMirroringDeploymentGroup( + _BaseMirroringRestTransport._BaseCreateMirroringDeploymentGroup, + MirroringRestStub, + ): + def __hash__(self): + return hash("MirroringRestTransport.CreateMirroringDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: mirroring.CreateMirroringDeploymentGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create mirroring + deployment group method over HTTP. + + Args: + request (~.mirroring.CreateMirroringDeploymentGroupRequest): + The request object. Request message for + CreateMirroringDeploymentGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseMirroringRestTransport._BaseCreateMirroringDeploymentGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_mirroring_deployment_group( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseCreateMirroringDeploymentGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseMirroringRestTransport._BaseCreateMirroringDeploymentGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseCreateMirroringDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.CreateMirroringDeploymentGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "CreateMirroringDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MirroringRestTransport._CreateMirroringDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_mirroring_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_mirroring_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.create_mirroring_deployment_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "CreateMirroringDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateMirroringEndpointGroup( + _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroup, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.CreateMirroringEndpointGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + 
headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: mirroring.CreateMirroringEndpointGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create mirroring endpoint + group method over HTTP. + + Args: + request (~.mirroring.CreateMirroringEndpointGroupRequest): + The request object. Request message for + CreateMirroringEndpointGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_mirroring_endpoint_group( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.CreateMirroringEndpointGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "CreateMirroringEndpointGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MirroringRestTransport._CreateMirroringEndpointGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_mirroring_endpoint_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_mirroring_endpoint_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.create_mirroring_endpoint_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "CreateMirroringEndpointGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateMirroringEndpointGroupAssociation( + _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroupAssociation, + MirroringRestStub, + ): + def __hash__(self): + return hash( + "MirroringRestTransport.CreateMirroringEndpointGroupAssociation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: mirroring.CreateMirroringEndpointGroupAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create mirroring endpoint + group association method over HTTP. + + Args: + request (~.mirroring.CreateMirroringEndpointGroupAssociationRequest): + The request object. Request message for + CreateMirroringEndpointGroupAssociation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroupAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_create_mirroring_endpoint_group_association( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroupAssociation._get_transcoded_request( + http_options, request + ) + + body = _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroupAssociation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroupAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.CreateMirroringEndpointGroupAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "CreateMirroringEndpointGroupAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._CreateMirroringEndpointGroupAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_mirroring_endpoint_group_association( + resp + ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_mirroring_endpoint_group_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.create_mirroring_endpoint_group_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "CreateMirroringEndpointGroupAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteMirroringDeployment( + _BaseMirroringRestTransport._BaseDeleteMirroringDeployment, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.DeleteMirroringDeployment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: mirroring.DeleteMirroringDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete mirroring + deployment method over HTTP. + + Args: + request (~.mirroring.DeleteMirroringDeploymentRequest): + The request object. Request message for + DeleteMirroringDeployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseMirroringRestTransport._BaseDeleteMirroringDeployment._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_mirroring_deployment( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseDeleteMirroringDeployment._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseDeleteMirroringDeployment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.DeleteMirroringDeployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "DeleteMirroringDeployment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._DeleteMirroringDeployment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_mirroring_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_mirroring_deployment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.delete_mirroring_deployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "DeleteMirroringDeployment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteMirroringDeploymentGroup( + _BaseMirroringRestTransport._BaseDeleteMirroringDeploymentGroup, + MirroringRestStub, + ): + def __hash__(self): + return hash("MirroringRestTransport.DeleteMirroringDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: mirroring.DeleteMirroringDeploymentGroupRequest, + *, + retry: 
OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete mirroring + deployment group method over HTTP. + + Args: + request (~.mirroring.DeleteMirroringDeploymentGroupRequest): + The request object. Request message for + DeleteMirroringDeploymentGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseMirroringRestTransport._BaseDeleteMirroringDeploymentGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_mirroring_deployment_group( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseDeleteMirroringDeploymentGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseDeleteMirroringDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.DeleteMirroringDeploymentGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "DeleteMirroringDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MirroringRestTransport._DeleteMirroringDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_mirroring_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_mirroring_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.delete_mirroring_deployment_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "DeleteMirroringDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteMirroringEndpointGroup( + _BaseMirroringRestTransport._BaseDeleteMirroringEndpointGroup, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.DeleteMirroringEndpointGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: mirroring.DeleteMirroringEndpointGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete mirroring endpoint + group method over HTTP. + + Args: + request (~.mirroring.DeleteMirroringEndpointGroupRequest): + The request object. Request message for + DeleteMirroringEndpointGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseMirroringRestTransport._BaseDeleteMirroringEndpointGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_mirroring_endpoint_group( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseDeleteMirroringEndpointGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseDeleteMirroringEndpointGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.DeleteMirroringEndpointGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "DeleteMirroringEndpointGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MirroringRestTransport._DeleteMirroringEndpointGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_mirroring_endpoint_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_mirroring_endpoint_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.delete_mirroring_endpoint_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "DeleteMirroringEndpointGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteMirroringEndpointGroupAssociation( + _BaseMirroringRestTransport._BaseDeleteMirroringEndpointGroupAssociation, + MirroringRestStub, + ): + def __hash__(self): + return hash( + "MirroringRestTransport.DeleteMirroringEndpointGroupAssociation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def 
__call__( + self, + request: mirroring.DeleteMirroringEndpointGroupAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete mirroring endpoint + group association method over HTTP. + + Args: + request (~.mirroring.DeleteMirroringEndpointGroupAssociationRequest): + The request object. Request message for + DeleteMirroringEndpointGroupAssociation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseMirroringRestTransport._BaseDeleteMirroringEndpointGroupAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_delete_mirroring_endpoint_group_association( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseDeleteMirroringEndpointGroupAssociation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseDeleteMirroringEndpointGroupAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.DeleteMirroringEndpointGroupAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "DeleteMirroringEndpointGroupAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._DeleteMirroringEndpointGroupAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_mirroring_endpoint_group_association( + resp + ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_mirroring_endpoint_group_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.delete_mirroring_endpoint_group_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "DeleteMirroringEndpointGroupAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetMirroringDeployment( + _BaseMirroringRestTransport._BaseGetMirroringDeployment, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.GetMirroringDeployment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: mirroring.GetMirroringDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringDeployment: + r"""Call the get mirroring deployment method over HTTP. + + Args: + request (~.mirroring.GetMirroringDeploymentRequest): + The request object. Request message for + GetMirroringDeployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.mirroring.MirroringDeployment: + A deployment represents a zonal + mirroring backend ready to accept + GENEVE-encapsulated replica traffic, + e.g. a zonal instance group fronted by + an internal passthrough load balancer. + Deployments are always part of a global + deployment group which represents a + global mirroring service. 
+ + """ + + http_options = ( + _BaseMirroringRestTransport._BaseGetMirroringDeployment._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_mirroring_deployment( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseGetMirroringDeployment._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseGetMirroringDeployment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.GetMirroringDeployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetMirroringDeployment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._GetMirroringDeployment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = mirroring.MirroringDeployment() + pb_resp = mirroring.MirroringDeployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_mirroring_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_mirroring_deployment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = mirroring.MirroringDeployment.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.get_mirroring_deployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetMirroringDeployment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetMirroringDeploymentGroup( + _BaseMirroringRestTransport._BaseGetMirroringDeploymentGroup, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.GetMirroringDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
mirroring.GetMirroringDeploymentGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringDeploymentGroup: + r"""Call the get mirroring deployment + group method over HTTP. + + Args: + request (~.mirroring.GetMirroringDeploymentGroupRequest): + The request object. Request message for + GetMirroringDeploymentGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.mirroring.MirroringDeploymentGroup: + A deployment group aggregates many + zonal mirroring backends (deployments) + into a single global mirroring service. + Consumers can connect this service using + an endpoint group. + + """ + + http_options = ( + _BaseMirroringRestTransport._BaseGetMirroringDeploymentGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_mirroring_deployment_group( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseGetMirroringDeploymentGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseGetMirroringDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.GetMirroringDeploymentGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetMirroringDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MirroringRestTransport._GetMirroringDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = mirroring.MirroringDeploymentGroup() + pb_resp = mirroring.MirroringDeploymentGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_mirroring_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_mirroring_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = mirroring.MirroringDeploymentGroup.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.get_mirroring_deployment_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetMirroringDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetMirroringEndpointGroup( + _BaseMirroringRestTransport._BaseGetMirroringEndpointGroup, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.GetMirroringEndpointGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: mirroring.GetMirroringEndpointGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringEndpointGroup: + r"""Call the get mirroring endpoint + group method over HTTP. + + Args: + request (~.mirroring.GetMirroringEndpointGroupRequest): + The request object. Request message for + GetMirroringEndpointGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.mirroring.MirroringEndpointGroup: + An endpoint group is a consumer + frontend for a deployment group + (backend). In order to configure + mirroring for a network, consumers must + create: + + - An association between their network + and the endpoint group. + - A security profile that points to the + endpoint group. + - A mirroring rule that references the + security profile (group). 
+ + """ + + http_options = ( + _BaseMirroringRestTransport._BaseGetMirroringEndpointGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_mirroring_endpoint_group( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseGetMirroringEndpointGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseGetMirroringEndpointGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.GetMirroringEndpointGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetMirroringEndpointGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._GetMirroringEndpointGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = mirroring.MirroringEndpointGroup() + pb_resp = mirroring.MirroringEndpointGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_mirroring_endpoint_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_mirroring_endpoint_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = mirroring.MirroringEndpointGroup.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.get_mirroring_endpoint_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetMirroringEndpointGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetMirroringEndpointGroupAssociation( + _BaseMirroringRestTransport._BaseGetMirroringEndpointGroupAssociation, + MirroringRestStub, + ): + def __hash__(self): + return hash("MirroringRestTransport.GetMirroringEndpointGroupAssociation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return 
response + + def __call__( + self, + request: mirroring.GetMirroringEndpointGroupAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.MirroringEndpointGroupAssociation: + r"""Call the get mirroring endpoint + group association method over HTTP. + + Args: + request (~.mirroring.GetMirroringEndpointGroupAssociationRequest): + The request object. Request message for + GetMirroringEndpointGroupAssociation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.mirroring.MirroringEndpointGroupAssociation: + An endpoint group association + represents a link between a network and + an endpoint group in the organization. + + Creating an association creates the + networking infrastructure linking the + network to the endpoint group, but does + not enable mirroring by itself. To + enable mirroring, the user must also + create a network firewall policy + containing mirroring rules and associate + it with the network. + + """ + + http_options = ( + _BaseMirroringRestTransport._BaseGetMirroringEndpointGroupAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_get_mirroring_endpoint_group_association( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseGetMirroringEndpointGroupAssociation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseGetMirroringEndpointGroupAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.GetMirroringEndpointGroupAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetMirroringEndpointGroupAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._GetMirroringEndpointGroupAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = mirroring.MirroringEndpointGroupAssociation() + pb_resp = mirroring.MirroringEndpointGroupAssociation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_mirroring_endpoint_group_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_mirroring_endpoint_group_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + mirroring.MirroringEndpointGroupAssociation.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.get_mirroring_endpoint_group_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetMirroringEndpointGroupAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListMirroringDeploymentGroups( + _BaseMirroringRestTransport._BaseListMirroringDeploymentGroups, + MirroringRestStub, + ): + def __hash__(self): + return hash("MirroringRestTransport.ListMirroringDeploymentGroups") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: mirroring.ListMirroringDeploymentGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.ListMirroringDeploymentGroupsResponse: + r"""Call the list mirroring deployment + groups method over HTTP. + + Args: + request (~.mirroring.ListMirroringDeploymentGroupsRequest): + The request object. Request message for + ListMirroringDeploymentGroups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.mirroring.ListMirroringDeploymentGroupsResponse: + Response message for + ListMirroringDeploymentGroups. 
+ + """ + + http_options = ( + _BaseMirroringRestTransport._BaseListMirroringDeploymentGroups._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_mirroring_deployment_groups( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseListMirroringDeploymentGroups._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseListMirroringDeploymentGroups._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.ListMirroringDeploymentGroups", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListMirroringDeploymentGroups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MirroringRestTransport._ListMirroringDeploymentGroups._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = mirroring.ListMirroringDeploymentGroupsResponse() + pb_resp = mirroring.ListMirroringDeploymentGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_mirroring_deployment_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_mirroring_deployment_groups_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + mirroring.ListMirroringDeploymentGroupsResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.list_mirroring_deployment_groups", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListMirroringDeploymentGroups", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListMirroringDeployments( + _BaseMirroringRestTransport._BaseListMirroringDeployments, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.ListMirroringDeployments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: mirroring.ListMirroringDeploymentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.ListMirroringDeploymentsResponse: + r"""Call the list mirroring + deployments method over HTTP. + + Args: + request (~.mirroring.ListMirroringDeploymentsRequest): + The request object. Request message for + ListMirroringDeployments. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.mirroring.ListMirroringDeploymentsResponse: + Response message for + ListMirroringDeployments. + + """ + + http_options = ( + _BaseMirroringRestTransport._BaseListMirroringDeployments._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_mirroring_deployments( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseListMirroringDeployments._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseListMirroringDeployments._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.ListMirroringDeployments", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListMirroringDeployments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._ListMirroringDeployments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = mirroring.ListMirroringDeploymentsResponse() + pb_resp = mirroring.ListMirroringDeploymentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_mirroring_deployments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_mirroring_deployments_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + mirroring.ListMirroringDeploymentsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.list_mirroring_deployments", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListMirroringDeployments", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListMirroringEndpointGroupAssociations( + _BaseMirroringRestTransport._BaseListMirroringEndpointGroupAssociations, + MirroringRestStub, + ): + def __hash__(self): + return hash("MirroringRestTransport.ListMirroringEndpointGroupAssociations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: mirroring.ListMirroringEndpointGroupAssociationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.ListMirroringEndpointGroupAssociationsResponse: + r"""Call the list mirroring endpoint + group associations method over HTTP. + + Args: + request (~.mirroring.ListMirroringEndpointGroupAssociationsRequest): + The request object. Request message for + ListMirroringEndpointGroupAssociations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.mirroring.ListMirroringEndpointGroupAssociationsResponse: + Response message for + ListMirroringEndpointGroupAssociations. 
+ + """ + + http_options = ( + _BaseMirroringRestTransport._BaseListMirroringEndpointGroupAssociations._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_list_mirroring_endpoint_group_associations( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseListMirroringEndpointGroupAssociations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseListMirroringEndpointGroupAssociations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.ListMirroringEndpointGroupAssociations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListMirroringEndpointGroupAssociations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._ListMirroringEndpointGroupAssociations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = mirroring.ListMirroringEndpointGroupAssociationsResponse() + pb_resp = mirroring.ListMirroringEndpointGroupAssociationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_mirroring_endpoint_group_associations( + resp + ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_mirroring_endpoint_group_associations_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = mirroring.ListMirroringEndpointGroupAssociationsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.list_mirroring_endpoint_group_associations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListMirroringEndpointGroupAssociations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListMirroringEndpointGroups( + _BaseMirroringRestTransport._BaseListMirroringEndpointGroups, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.ListMirroringEndpointGroups") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = 
"application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: mirroring.ListMirroringEndpointGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> mirroring.ListMirroringEndpointGroupsResponse: + r"""Call the list mirroring endpoint + groups method over HTTP. + + Args: + request (~.mirroring.ListMirroringEndpointGroupsRequest): + The request object. Request message for + ListMirroringEndpointGroups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.mirroring.ListMirroringEndpointGroupsResponse: + Response message for + ListMirroringEndpointGroups. + + """ + + http_options = ( + _BaseMirroringRestTransport._BaseListMirroringEndpointGroups._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_mirroring_endpoint_groups( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseListMirroringEndpointGroups._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseListMirroringEndpointGroups._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.ListMirroringEndpointGroups", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListMirroringEndpointGroups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MirroringRestTransport._ListMirroringEndpointGroups._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = mirroring.ListMirroringEndpointGroupsResponse() + pb_resp = mirroring.ListMirroringEndpointGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_mirroring_endpoint_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_mirroring_endpoint_groups_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + mirroring.ListMirroringEndpointGroupsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.list_mirroring_endpoint_groups", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListMirroringEndpointGroups", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateMirroringDeployment( + _BaseMirroringRestTransport._BaseUpdateMirroringDeployment, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.UpdateMirroringDeployment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: mirroring.UpdateMirroringDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update mirroring + deployment method over HTTP. + + Args: + request (~.mirroring.UpdateMirroringDeploymentRequest): + The request object. Request message for + UpdateMirroringDeployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseMirroringRestTransport._BaseUpdateMirroringDeployment._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_mirroring_deployment( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseUpdateMirroringDeployment._get_transcoded_request( + http_options, request + ) + + body = _BaseMirroringRestTransport._BaseUpdateMirroringDeployment._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseUpdateMirroringDeployment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.UpdateMirroringDeployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "UpdateMirroringDeployment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._UpdateMirroringDeployment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_mirroring_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_mirroring_deployment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.update_mirroring_deployment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "UpdateMirroringDeployment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateMirroringDeploymentGroup( + _BaseMirroringRestTransport._BaseUpdateMirroringDeploymentGroup, + MirroringRestStub, + ): + def __hash__(self): + return hash("MirroringRestTransport.UpdateMirroringDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), 
+ data=body, + ) + return response + + def __call__( + self, + request: mirroring.UpdateMirroringDeploymentGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update mirroring + deployment group method over HTTP. + + Args: + request (~.mirroring.UpdateMirroringDeploymentGroupRequest): + The request object. Request message for + UpdateMirroringDeploymentGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseMirroringRestTransport._BaseUpdateMirroringDeploymentGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_mirroring_deployment_group( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseUpdateMirroringDeploymentGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseMirroringRestTransport._BaseUpdateMirroringDeploymentGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseUpdateMirroringDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.UpdateMirroringDeploymentGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "UpdateMirroringDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MirroringRestTransport._UpdateMirroringDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_mirroring_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_mirroring_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.update_mirroring_deployment_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "UpdateMirroringDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateMirroringEndpointGroup( + _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroup, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.UpdateMirroringEndpointGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: mirroring.UpdateMirroringEndpointGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update mirroring endpoint + group method over HTTP. + + Args: + request (~.mirroring.UpdateMirroringEndpointGroupRequest): + The request object. Request message for + UpdateMirroringEndpointGroup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_mirroring_endpoint_group( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.UpdateMirroringEndpointGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "UpdateMirroringEndpointGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MirroringRestTransport._UpdateMirroringEndpointGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_mirroring_endpoint_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_mirroring_endpoint_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.update_mirroring_endpoint_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "UpdateMirroringEndpointGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateMirroringEndpointGroupAssociation( + _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroupAssociation, + MirroringRestStub, + ): + def __hash__(self): + return hash( + "MirroringRestTransport.UpdateMirroringEndpointGroupAssociation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + 
timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: mirroring.UpdateMirroringEndpointGroupAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update mirroring endpoint + group association method over HTTP. + + Args: + request (~.mirroring.UpdateMirroringEndpointGroupAssociationRequest): + The request object. Request message for + UpdateMirroringEndpointGroupAssociation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroupAssociation._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_update_mirroring_endpoint_group_association( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroupAssociation._get_transcoded_request( + http_options, request + ) + + body = _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroupAssociation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroupAssociation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.UpdateMirroringEndpointGroupAssociation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "UpdateMirroringEndpointGroupAssociation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._UpdateMirroringEndpointGroupAssociation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_mirroring_endpoint_group_association( + resp + ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_mirroring_endpoint_group_association_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringClient.update_mirroring_endpoint_group_association", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "UpdateMirroringEndpointGroupAssociation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.CreateMirroringDeploymentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateMirroringDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.CreateMirroringDeploymentGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateMirroringDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.CreateMirroringEndpointGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateMirroringEndpointGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.CreateMirroringEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateMirroringEndpointGroupAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.DeleteMirroringDeploymentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteMirroringDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.DeleteMirroringDeploymentGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteMirroringDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.DeleteMirroringEndpointGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteMirroringEndpointGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.DeleteMirroringEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteMirroringEndpointGroupAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.GetMirroringDeploymentRequest], mirroring.MirroringDeployment + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetMirroringDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.GetMirroringDeploymentGroupRequest], + mirroring.MirroringDeploymentGroup, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetMirroringDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.GetMirroringEndpointGroupRequest], mirroring.MirroringEndpointGroup + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetMirroringEndpointGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.GetMirroringEndpointGroupAssociationRequest], + mirroring.MirroringEndpointGroupAssociation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetMirroringEndpointGroupAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_mirroring_deployment_groups( + self, + ) -> Callable[ + [mirroring.ListMirroringDeploymentGroupsRequest], + mirroring.ListMirroringDeploymentGroupsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListMirroringDeploymentGroups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_mirroring_deployments( + self, + ) -> Callable[ + [mirroring.ListMirroringDeploymentsRequest], + mirroring.ListMirroringDeploymentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListMirroringDeployments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_mirroring_endpoint_group_associations( + self, + ) -> Callable[ + [mirroring.ListMirroringEndpointGroupAssociationsRequest], + mirroring.ListMirroringEndpointGroupAssociationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListMirroringEndpointGroupAssociations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_mirroring_endpoint_groups( + self, + ) -> Callable[ + [mirroring.ListMirroringEndpointGroupsRequest], + mirroring.ListMirroringEndpointGroupsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListMirroringEndpointGroups(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_mirroring_deployment( + self, + ) -> Callable[ + [mirroring.UpdateMirroringDeploymentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateMirroringDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_mirroring_deployment_group( + self, + ) -> Callable[ + [mirroring.UpdateMirroringDeploymentGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateMirroringDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_mirroring_endpoint_group( + self, + ) -> Callable[ + [mirroring.UpdateMirroringEndpointGroupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateMirroringEndpointGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_mirroring_endpoint_group_association( + self, + ) -> Callable[ + [mirroring.UpdateMirroringEndpointGroupAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateMirroringEndpointGroupAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseMirroringRestTransport._BaseGetLocation, MirroringRestStub): + def __hash__(self): + return hash("MirroringRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = ( + _BaseMirroringRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = ( + _BaseMirroringRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMirroringRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseMirroringRestTransport._BaseListLocations, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = ( + _BaseMirroringRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = ( + _BaseMirroringRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMirroringRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseMirroringRestTransport._BaseGetIamPolicy, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. 
+ + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = ( + _BaseMirroringRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = ( + _BaseMirroringRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMirroringRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseMirroringRestTransport._BaseSetIamPolicy, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options = ( + _BaseMirroringRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = ( + _BaseMirroringRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseMirroringRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseMirroringRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseMirroringRestTransport._BaseTestIamPermissions, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = ( + _BaseMirroringRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseMirroringRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMirroringRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseMirroringRestTransport._BaseCancelOperation, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseMirroringRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = ( + _BaseMirroringRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMirroringRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.CancelOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseMirroringRestTransport._BaseDeleteOperation, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseMirroringRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseMirroringRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseMirroringRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseMirroringRestTransport._BaseGetOperation, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = ( + _BaseMirroringRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = ( + _BaseMirroringRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMirroringRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseMirroringRestTransport._BaseListOperations, MirroringRestStub + ): + def __hash__(self): + return hash("MirroringRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BaseMirroringRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = ( + _BaseMirroringRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMirroringRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.MirroringClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MirroringRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.MirroringAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MirroringRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/rest_base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/rest_base.py new file mode 100644 index 000000000000..4caf6fd6636c --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/mirroring/transports/rest_base.py @@ -0,0 +1,1447 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.network_security_v1alpha1.types import mirroring + +from .base import DEFAULT_CLIENT_INFO, MirroringTransport + + +class _BaseMirroringRestTransport(MirroringTransport): + """Base REST backend transport for Mirroring. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateMirroringDeployment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "mirroringDeploymentId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/mirroringDeployments", + "body": "mirroring_deployment", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.CreateMirroringDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseCreateMirroringDeployment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateMirroringDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "mirroringDeploymentGroupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/mirroringDeploymentGroups", + "body":
"mirroring_deployment_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.CreateMirroringDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseCreateMirroringDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateMirroringEndpointGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "mirroringEndpointGroupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/mirroringEndpointGroups", + "body": "mirroring_endpoint_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.CreateMirroringEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateMirroringEndpointGroupAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/mirroringEndpointGroupAssociations", + "body": "mirroring_endpoint_group_association", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.CreateMirroringEndpointGroupAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = 
json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseCreateMirroringEndpointGroupAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteMirroringDeployment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/mirroringDeployments/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.DeleteMirroringDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseDeleteMirroringDeployment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteMirroringDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/mirroringDeploymentGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.DeleteMirroringDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseDeleteMirroringDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteMirroringEndpointGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + 
"uri": "/v1alpha1/{name=projects/*/locations/*/mirroringEndpointGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.DeleteMirroringEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseDeleteMirroringEndpointGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteMirroringEndpointGroupAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/mirroringEndpointGroupAssociations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.DeleteMirroringEndpointGroupAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseDeleteMirroringEndpointGroupAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetMirroringDeployment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/mirroringDeployments/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.GetMirroringDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseGetMirroringDeployment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetMirroringDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/mirroringDeploymentGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.GetMirroringDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseGetMirroringDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetMirroringEndpointGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/mirroringEndpointGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.GetMirroringEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseGetMirroringEndpointGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetMirroringEndpointGroupAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/mirroringEndpointGroupAssociations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.GetMirroringEndpointGroupAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseGetMirroringEndpointGroupAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseListMirroringDeploymentGroups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/mirroringDeploymentGroups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.ListMirroringDeploymentGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseListMirroringDeploymentGroups._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListMirroringDeployments: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/mirroringDeployments", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.ListMirroringDeploymentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseListMirroringDeployments._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListMirroringEndpointGroupAssociations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/mirroringEndpointGroupAssociations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.ListMirroringEndpointGroupAssociationsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseListMirroringEndpointGroupAssociations._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListMirroringEndpointGroups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/mirroringEndpointGroups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.ListMirroringEndpointGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseListMirroringEndpointGroups._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateMirroringDeployment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{mirroring_deployment.name=projects/*/locations/*/mirroringDeployments/*}", + "body": "mirroring_deployment", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.UpdateMirroringDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseUpdateMirroringDeployment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateMirroringDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: 
List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{mirroring_deployment_group.name=projects/*/locations/*/mirroringDeploymentGroups/*}", + "body": "mirroring_deployment_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.UpdateMirroringDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseUpdateMirroringDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateMirroringEndpointGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{mirroring_endpoint_group.name=projects/*/locations/*/mirroringEndpointGroups/*}", + "body": "mirroring_endpoint_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.UpdateMirroringEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateMirroringEndpointGroupAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{mirroring_endpoint_group_association.name=projects/*/locations/*/mirroringEndpointGroupAssociations/*}", + "body": "mirroring_endpoint_group_association", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = mirroring.UpdateMirroringEndpointGroupAssociationRequest.pb( + request + ) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMirroringRestTransport._BaseUpdateMirroringEndpointGroupAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def 
_get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = 
json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseMirroringRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/async_client.py index 87f86e89a61c..6fb6d4323de1 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/async_client.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/async_client.py @@ -55,11 +55,42 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.network_security_v1alpha1.services.network_security import pagers +from google.cloud.network_security_v1alpha1.types import ( + 
authorization_policy as gcn_authorization_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + authz_policy as gcn_authz_policy, +) +from google.cloud.network_security_v1alpha1.types import backend_authentication_config +from google.cloud.network_security_v1alpha1.types import ( + backend_authentication_config as gcn_backend_authentication_config, +) from google.cloud.network_security_v1alpha1.types import ( client_tls_policy as gcn_client_tls_policy, ) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy as gcn_gateway_security_policy, +) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy_rule +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy_rule as gcn_gateway_security_policy_rule, +) +from google.cloud.network_security_v1alpha1.types import ( + server_tls_policy as gcn_server_tls_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + tls_inspection_policy as gcn_tls_inspection_policy, +) +from google.cloud.network_security_v1alpha1.types import url_list as gcn_url_list +from google.cloud.network_security_v1alpha1.types import authorization_policy +from google.cloud.network_security_v1alpha1.types import authz_policy from google.cloud.network_security_v1alpha1.types import client_tls_policy -from google.cloud.network_security_v1alpha1.types import common, tls +from google.cloud.network_security_v1alpha1.types import common +from google.cloud.network_security_v1alpha1.types import server_tls_policy +from google.cloud.network_security_v1alpha1.types import tls +from google.cloud.network_security_v1alpha1.types import tls_inspection_policy +from google.cloud.network_security_v1alpha1.types import url_list from .client import NetworkSecurityClient from .transports.base import DEFAULT_CLIENT_INFO, NetworkSecurityTransport @@ -90,10 +121,58 @@ class NetworkSecurityAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = NetworkSecurityClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = NetworkSecurityClient._DEFAULT_UNIVERSE + authorization_policy_path = staticmethod( + NetworkSecurityClient.authorization_policy_path + ) + parse_authorization_policy_path = staticmethod( + NetworkSecurityClient.parse_authorization_policy_path + ) + authz_policy_path = staticmethod(NetworkSecurityClient.authz_policy_path) + parse_authz_policy_path = staticmethod( + NetworkSecurityClient.parse_authz_policy_path + ) + backend_authentication_config_path = staticmethod( + NetworkSecurityClient.backend_authentication_config_path + ) + parse_backend_authentication_config_path = staticmethod( + NetworkSecurityClient.parse_backend_authentication_config_path + ) + ca_pool_path = staticmethod(NetworkSecurityClient.ca_pool_path) + parse_ca_pool_path = staticmethod(NetworkSecurityClient.parse_ca_pool_path) + certificate_path = staticmethod(NetworkSecurityClient.certificate_path) + parse_certificate_path = staticmethod(NetworkSecurityClient.parse_certificate_path) client_tls_policy_path = staticmethod(NetworkSecurityClient.client_tls_policy_path) parse_client_tls_policy_path = staticmethod( NetworkSecurityClient.parse_client_tls_policy_path ) + gateway_security_policy_path = staticmethod( + NetworkSecurityClient.gateway_security_policy_path + ) + parse_gateway_security_policy_path = staticmethod( + NetworkSecurityClient.parse_gateway_security_policy_path + ) + gateway_security_policy_rule_path = staticmethod( + 
NetworkSecurityClient.gateway_security_policy_rule_path + ) + parse_gateway_security_policy_rule_path = staticmethod( + NetworkSecurityClient.parse_gateway_security_policy_rule_path + ) + server_tls_policy_path = staticmethod(NetworkSecurityClient.server_tls_policy_path) + parse_server_tls_policy_path = staticmethod( + NetworkSecurityClient.parse_server_tls_policy_path + ) + tls_inspection_policy_path = staticmethod( + NetworkSecurityClient.tls_inspection_policy_path + ) + parse_tls_inspection_policy_path = staticmethod( + NetworkSecurityClient.parse_tls_inspection_policy_path + ) + trust_config_path = staticmethod(NetworkSecurityClient.trust_config_path) + parse_trust_config_path = staticmethod( + NetworkSecurityClient.parse_trust_config_path + ) + url_list_path = staticmethod(NetworkSecurityClient.url_list_path) + parse_url_list_path = staticmethod(NetworkSecurityClient.parse_url_list_path) common_billing_account_path = staticmethod( NetworkSecurityClient.common_billing_account_path ) @@ -308,18 +387,18 @@ def __init__( }, ) - async def list_client_tls_policies( + async def list_authorization_policies( self, request: Optional[ - Union[client_tls_policy.ListClientTlsPoliciesRequest, dict] + Union[authorization_policy.ListAuthorizationPoliciesRequest, dict] ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListClientTlsPoliciesAsyncPager: - r"""Lists ClientTlsPolicies in a given project and + ) -> pagers.ListAuthorizationPoliciesAsyncPager: + r"""Lists AuthorizationPolicies in a given project and location. .. code-block:: python @@ -333,30 +412,30 @@ async def list_client_tls_policies( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import network_security_v1alpha1 - async def sample_list_client_tls_policies(): + async def sample_list_authorization_policies(): # Create a client client = network_security_v1alpha1.NetworkSecurityAsyncClient() # Initialize request argument(s) - request = network_security_v1alpha1.ListClientTlsPoliciesRequest( + request = network_security_v1alpha1.ListAuthorizationPoliciesRequest( parent="parent_value", ) # Make the request - page_result = client.list_client_tls_policies(request=request) + page_result = client.list_authorization_policies(request=request) # Handle the response async for response in page_result: print(response) Args: - request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest, dict]]): - The request object. Request used by the - ListClientTlsPolicies method. + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesRequest, dict]]): + The request object. Request used with the + ListAuthorizationPolicies method. parent (:class:`str`): Required. The project and location from which the - ClientTlsPolicies should be listed, specified in the - format ``projects/*/locations/{location}``. + AuthorizationPolicies should be listed, specified in the + format ``projects/{project}/locations/{location}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -370,9 +449,9 @@ async def sample_list_client_tls_policies(): be of type `bytes`. 
Returns: - google.cloud.network_security_v1alpha1.services.network_security.pagers.ListClientTlsPoliciesAsyncPager: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListAuthorizationPoliciesAsyncPager: Response returned by the - ListClientTlsPolicies method. + ListAuthorizationPolicies method. Iterating over this object will yield results and resolve additional pages automatically. @@ -393,8 +472,10 @@ async def sample_list_client_tls_policies(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, client_tls_policy.ListClientTlsPoliciesRequest): - request = client_tls_policy.ListClientTlsPoliciesRequest(request) + if not isinstance( + request, authorization_policy.ListAuthorizationPoliciesRequest + ): + request = authorization_policy.ListAuthorizationPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -404,7 +485,7 @@ async def sample_list_client_tls_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_client_tls_policies + self._client._transport.list_authorization_policies ] # Certain fields should be provided within the metadata header; @@ -426,7 +507,7 @@ async def sample_list_client_tls_policies(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListClientTlsPoliciesAsyncPager( + response = pagers.ListAuthorizationPoliciesAsyncPager( method=rpc, request=request, response=response, @@ -438,18 +519,18 @@ async def sample_list_client_tls_policies(): # Done; return the response. return response - async def get_client_tls_policy( + async def get_authorization_policy( self, request: Optional[ - Union[client_tls_policy.GetClientTlsPolicyRequest, dict] + Union[authorization_policy.GetAuthorizationPolicyRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> client_tls_policy.ClientTlsPolicy: - r"""Gets details of a single ClientTlsPolicy. + ) -> authorization_policy.AuthorizationPolicy: + r"""Gets details of a single AuthorizationPolicy. .. code-block:: python @@ -462,29 +543,29 @@ async def get_client_tls_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import network_security_v1alpha1 - async def sample_get_client_tls_policy(): + async def sample_get_authorization_policy(): # Create a client client = network_security_v1alpha1.NetworkSecurityAsyncClient() # Initialize request argument(s) - request = network_security_v1alpha1.GetClientTlsPolicyRequest( + request = network_security_v1alpha1.GetAuthorizationPolicyRequest( name="name_value", ) # Make the request - response = await client.get_client_tls_policy(request=request) + response = await client.get_authorization_policy(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetClientTlsPolicyRequest, dict]]): + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetAuthorizationPolicyRequest, dict]]): The request object. Request used by the - GetClientTlsPolicy method. + GetAuthorizationPolicy method. name (:class:`str`): - Required. 
A name of the ClientTlsPolicy to get. Must be - in the format - ``projects/*/locations/{location}/clientTlsPolicies/*``. + Required. A name of the AuthorizationPolicy to get. Must + be in the format + ``projects/{project}/locations/{location}/authorizationPolicies/*``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -498,13 +579,14 @@ async def sample_get_client_tls_policy(): be of type `bytes`. Returns: - google.cloud.network_security_v1alpha1.types.ClientTlsPolicy: - ClientTlsPolicy is a resource that - specifies how a client should - authenticate connections to backends of - a service. This resource itself does not - affect configuration unless it is - attached to a backend service resource. + google.cloud.network_security_v1alpha1.types.AuthorizationPolicy: + AuthorizationPolicy is a resource + that specifies how a server should + authorize incoming connections. This + resource in itself does not change the + configuration unless it's attached to a + target https proxy or endpoint config + selector resource. """ # Create or coerce a protobuf request object. @@ -522,8 +604,8 @@ async def sample_get_client_tls_policy(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, client_tls_policy.GetClientTlsPolicyRequest): - request = client_tls_policy.GetClientTlsPolicyRequest(request) + if not isinstance(request, authorization_policy.GetAuthorizationPolicyRequest): + request = authorization_policy.GetAuthorizationPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -533,7 +615,7 @@ async def sample_get_client_tls_policy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_client_tls_policy + self._client._transport.get_authorization_policy ] # Certain fields should be provided within the metadata header; @@ -556,21 +638,23 @@ async def sample_get_client_tls_policy(): # Done; return the response. return response - async def create_client_tls_policy( + async def create_authorization_policy( self, request: Optional[ - Union[gcn_client_tls_policy.CreateClientTlsPolicyRequest, dict] + Union[gcn_authorization_policy.CreateAuthorizationPolicyRequest, dict] ] = None, *, parent: Optional[str] = None, - client_tls_policy: Optional[gcn_client_tls_policy.ClientTlsPolicy] = None, - client_tls_policy_id: Optional[str] = None, + authorization_policy: Optional[ + gcn_authorization_policy.AuthorizationPolicy + ] = None, + authorization_policy_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: - r"""Creates a new ClientTlsPolicy in a given project and - location. + r"""Creates a new AuthorizationPolicy in a given project + and location. .. 
code-block:: python @@ -583,22 +667,23 @@ async def create_client_tls_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import network_security_v1alpha1 - async def sample_create_client_tls_policy(): + async def sample_create_authorization_policy(): # Create a client client = network_security_v1alpha1.NetworkSecurityAsyncClient() # Initialize request argument(s) - client_tls_policy = network_security_v1alpha1.ClientTlsPolicy() - client_tls_policy.name = "name_value" + authorization_policy = network_security_v1alpha1.AuthorizationPolicy() + authorization_policy.name = "name_value" + authorization_policy.action = "DENY" - request = network_security_v1alpha1.CreateClientTlsPolicyRequest( + request = network_security_v1alpha1.CreateAuthorizationPolicyRequest( parent="parent_value", - client_tls_policy_id="client_tls_policy_id_value", - client_tls_policy=client_tls_policy, + authorization_policy_id="authorization_policy_id_value", + authorization_policy=authorization_policy, ) # Make the request - operation = client.create_client_tls_policy(request=request) + operation = client.create_authorization_policy(request=request) print("Waiting for operation to complete...") @@ -608,32 +693,32 @@ async def sample_create_client_tls_policy(): print(response) Args: - request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateClientTlsPolicyRequest, dict]]): + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateAuthorizationPolicyRequest, dict]]): The request object. Request used by the - CreateClientTlsPolicy method. + CreateAuthorizationPolicy method. parent (:class:`str`): - Required. The parent resource of the ClientTlsPolicy. - Must be in the format - ``projects/*/locations/{location}``. + Required. The parent resource of the + AuthorizationPolicy. Must be in the format + ``projects/{project}/locations/{location}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - client_tls_policy (:class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy`): - Required. ClientTlsPolicy resource to - be created. + authorization_policy (:class:`google.cloud.network_security_v1alpha1.types.AuthorizationPolicy`): + Required. AuthorizationPolicy + resource to be created. - This corresponds to the ``client_tls_policy`` field + This corresponds to the ``authorization_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - client_tls_policy_id (:class:`str`): - Required. Short name of the ClientTlsPolicy resource to - be created. This value should be 1-63 characters long, - containing only letters, numbers, hyphens, and + authorization_policy_id (:class:`str`): + Required. Short name of the AuthorizationPolicy resource + to be created. This value should be 1-63 characters + long, containing only letters, numbers, hyphens, and underscores, and should not start with a number. E.g. - "client_mtls_policy". + "authz_policy". - This corresponds to the ``client_tls_policy_id`` field + This corresponds to the ``authorization_policy_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -648,16 +733,17 @@ async def sample_create_client_tls_policy(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. 
- The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy` ClientTlsPolicy is a resource that specifies how a client should authenticate - connections to backends of a service. This resource - itself does not affect configuration unless it is - attached to a backend service resource. + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.AuthorizationPolicy` AuthorizationPolicy is a resource that specifies how a server + should authorize incoming connections. This resource + in itself does not change the configuration unless + it's attached to a target https proxy or endpoint + config selector resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [parent, client_tls_policy, client_tls_policy_id] + flattened_params = [parent, authorization_policy, authorization_policy_id] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -669,22 +755,24 @@ async def sample_create_client_tls_policy(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, gcn_client_tls_policy.CreateClientTlsPolicyRequest): - request = gcn_client_tls_policy.CreateClientTlsPolicyRequest(request) + if not isinstance( + request, gcn_authorization_policy.CreateAuthorizationPolicyRequest + ): + request = gcn_authorization_policy.CreateAuthorizationPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if client_tls_policy is not None: - request.client_tls_policy = client_tls_policy - if client_tls_policy_id is not None: - request.client_tls_policy_id = client_tls_policy_id + if authorization_policy is not None: + request.authorization_policy = authorization_policy + if authorization_policy_id is not None: + request.authorization_policy_id = authorization_policy_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_client_tls_policy + self._client._transport.create_authorization_policy ] # Certain fields should be provided within the metadata header; @@ -708,26 +796,29 @@ async def sample_create_client_tls_policy(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - gcn_client_tls_policy.ClientTlsPolicy, + gcn_authorization_policy.AuthorizationPolicy, metadata_type=common.OperationMetadata, ) # Done; return the response. return response - async def update_client_tls_policy( + async def update_authorization_policy( self, request: Optional[ - Union[gcn_client_tls_policy.UpdateClientTlsPolicyRequest, dict] + Union[gcn_authorization_policy.UpdateAuthorizationPolicyRequest, dict] ] = None, *, - client_tls_policy: Optional[gcn_client_tls_policy.ClientTlsPolicy] = None, + authorization_policy: Optional[ + gcn_authorization_policy.AuthorizationPolicy + ] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: - r"""Updates the parameters of a single ClientTlsPolicy. 
+ r"""Updates the parameters of a single + AuthorizationPolicy. .. code-block:: python @@ -740,20 +831,21 @@ async def update_client_tls_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import network_security_v1alpha1 - async def sample_update_client_tls_policy(): + async def sample_update_authorization_policy(): # Create a client client = network_security_v1alpha1.NetworkSecurityAsyncClient() # Initialize request argument(s) - client_tls_policy = network_security_v1alpha1.ClientTlsPolicy() - client_tls_policy.name = "name_value" + authorization_policy = network_security_v1alpha1.AuthorizationPolicy() + authorization_policy.name = "name_value" + authorization_policy.action = "DENY" - request = network_security_v1alpha1.UpdateClientTlsPolicyRequest( - client_tls_policy=client_tls_policy, + request = network_security_v1alpha1.UpdateAuthorizationPolicyRequest( + authorization_policy=authorization_policy, ) # Make the request - operation = client.update_client_tls_policy(request=request) + operation = client.update_authorization_policy(request=request) print("Waiting for operation to complete...") @@ -763,19 +855,19 @@ async def sample_update_client_tls_policy(): print(response) Args: - request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateClientTlsPolicyRequest, dict]]): - The request object. Request used by UpdateClientTlsPolicy - method. - client_tls_policy (:class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy`): - Required. Updated ClientTlsPolicy + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateAuthorizationPolicyRequest, dict]]): + The request object. Request used by the + UpdateAuthorizationPolicy method. + authorization_policy (:class:`google.cloud.network_security_v1alpha1.types.AuthorizationPolicy`): + Required. Updated AuthorizationPolicy resource. - This corresponds to the ``client_tls_policy`` field + This corresponds to the ``authorization_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Optional. Field mask is used to specify the fields to be - overwritten in the ClientTlsPolicy resource by the + overwritten in the AuthorizationPolicy resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it is in the mask. If the user @@ -797,16 +889,17 @@ async def sample_update_client_tls_policy(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy` ClientTlsPolicy is a resource that specifies how a client should authenticate - connections to backends of a service. This resource - itself does not affect configuration unless it is - attached to a backend service resource. + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.AuthorizationPolicy` AuthorizationPolicy is a resource that specifies how a server + should authorize incoming connections. This resource + in itself does not change the configuration unless + it's attached to a target https proxy or endpoint + config selector resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- flattened_params = [client_tls_policy, update_mask] + flattened_params = [authorization_policy, update_mask] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -818,27 +911,29 @@ async def sample_update_client_tls_policy(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, gcn_client_tls_policy.UpdateClientTlsPolicyRequest): - request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest(request) + if not isinstance( + request, gcn_authorization_policy.UpdateAuthorizationPolicyRequest + ): + request = gcn_authorization_policy.UpdateAuthorizationPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if client_tls_policy is not None: - request.client_tls_policy = client_tls_policy + if authorization_policy is not None: + request.authorization_policy = authorization_policy if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.update_client_tls_policy + self._client._transport.update_authorization_policy ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("client_tls_policy.name", request.client_tls_policy.name),) + (("authorization_policy.name", request.authorization_policy.name),) ), ) @@ -857,17 +952,17 @@ async def sample_update_client_tls_policy(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - gcn_client_tls_policy.ClientTlsPolicy, + gcn_authorization_policy.AuthorizationPolicy, metadata_type=common.OperationMetadata, ) # Done; return the response. return response - async def delete_client_tls_policy( + async def delete_authorization_policy( self, request: Optional[ - Union[client_tls_policy.DeleteClientTlsPolicyRequest, dict] + Union[authorization_policy.DeleteAuthorizationPolicyRequest, dict] ] = None, *, name: Optional[str] = None, @@ -875,7 +970,7 @@ async def delete_client_tls_policy( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: - r"""Deletes a single ClientTlsPolicy. + r"""Deletes a single AuthorizationPolicy. .. 
code-block:: python @@ -888,17 +983,17 @@ async def delete_client_tls_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import network_security_v1alpha1 - async def sample_delete_client_tls_policy(): + async def sample_delete_authorization_policy(): # Create a client client = network_security_v1alpha1.NetworkSecurityAsyncClient() # Initialize request argument(s) - request = network_security_v1alpha1.DeleteClientTlsPolicyRequest( + request = network_security_v1alpha1.DeleteAuthorizationPolicyRequest( name="name_value", ) # Make the request - operation = client.delete_client_tls_policy(request=request) + operation = client.delete_authorization_policy(request=request) print("Waiting for operation to complete...") @@ -908,13 +1003,13 @@ async def sample_delete_client_tls_policy(): print(response) Args: - request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteClientTlsPolicyRequest, dict]]): + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteAuthorizationPolicyRequest, dict]]): The request object. Request used by the - DeleteClientTlsPolicy method. + DeleteAuthorizationPolicy method. name (:class:`str`): - Required. A name of the ClientTlsPolicy to delete. Must - be in the format - ``projects/*/locations/{location}/clientTlsPolicies/*``. + Required. A name of the AuthorizationPolicy to delete. + Must be in the format + ``projects/{project}/locations/{location}/authorizationPolicies/*``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -958,8 +1053,10 @@ async def sample_delete_client_tls_policy(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, client_tls_policy.DeleteClientTlsPolicyRequest): - request = client_tls_policy.DeleteClientTlsPolicyRequest(request) + if not isinstance( + request, authorization_policy.DeleteAuthorizationPolicyRequest + ): + request = authorization_policy.DeleteAuthorizationPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -969,7 +1066,5738 @@ async def sample_delete_client_tls_policy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_client_tls_policy + self._client._transport.delete_authorization_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_backend_authentication_configs( + self, + request: Optional[ + Union[ + backend_authentication_config.ListBackendAuthenticationConfigsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackendAuthenticationConfigsAsyncPager: + r"""Lists BackendAuthenticationConfigs in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_backend_authentication_configs(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListBackendAuthenticationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backend_authentication_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsRequest, dict]]): + The request object. Request used by the + ListBackendAuthenticationConfigs method. + parent (:class:`str`): + Required. The project and location from which the + BackendAuthenticationConfigs should be listed, specified + in the format ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListBackendAuthenticationConfigsAsyncPager: + Response returned by the + ListBackendAuthenticationConfigs method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, + backend_authentication_config.ListBackendAuthenticationConfigsRequest, + ): + request = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backend_authentication_configs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackendAuthenticationConfigsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backend_authentication_config( + self, + request: Optional[ + Union[ + backend_authentication_config.GetBackendAuthenticationConfigRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backend_authentication_config.BackendAuthenticationConfig: + r"""Gets details of a single BackendAuthenticationConfig + to BackendAuthenticationConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetBackendAuthenticationConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backend_authentication_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetBackendAuthenticationConfigRequest, dict]]): + The request object. Request used by the + GetBackendAuthenticationConfig method. + name (:class:`str`): + Required. A name of the BackendAuthenticationConfig to + get. Must be in the format + ``projects/*/locations/{location}/backendAuthenticationConfigs/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig: + BackendAuthenticationConfig message groups the TrustConfig together with + other settings that control how the load balancer + authenticates, and expresses its identity to, the + backend: + + - trustConfig is the attached TrustConfig. + + \* wellKnownRoots indicates whether the load balance + should trust backend server certificates that are + issued by public certificate authorities, in addition + to certificates trusted by the TrustConfig. + + \* clientCertificate is a client certificate that the + load balancer uses to express its identity to the + backend, if the connection to the backend uses mTLS. + + You can attach the BackendAuthenticationConfig to the + load balancer's BackendService directly determining + how that BackendService negotiates TLS. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backend_authentication_config.GetBackendAuthenticationConfigRequest + ): + request = ( + backend_authentication_config.GetBackendAuthenticationConfigRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backend_authentication_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_backend_authentication_config( + self, + request: Optional[ + Union[ + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + backend_authentication_config: Optional[ + gcn_backend_authentication_config.BackendAuthenticationConfig + ] = None, + backend_authentication_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new BackendAuthenticationConfig in a given + project and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + backend_authentication_config = network_security_v1alpha1.BackendAuthenticationConfig() + backend_authentication_config.name = "name_value" + + request = network_security_v1alpha1.CreateBackendAuthenticationConfigRequest( + parent="parent_value", + backend_authentication_config_id="backend_authentication_config_id_value", + backend_authentication_config=backend_authentication_config, + ) + + # Make the request + operation = client.create_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateBackendAuthenticationConfigRequest, dict]]): + The request object. Request used by the + CreateBackendAuthenticationConfig + method. + parent (:class:`str`): + Required. The parent resource of the + BackendAuthenticationConfig. Must be in the format + ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_authentication_config (:class:`google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig`): + Required. BackendAuthenticationConfig + resource to be created. + + This corresponds to the ``backend_authentication_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_authentication_config_id (:class:`str`): + Required. Short name of the + BackendAuthenticationConfig resource to + be created. This value should be 1-63 + characters long, containing only + letters, numbers, hyphens, and + underscores, and should not start with a + number. E.g. "backend-auth-config". + + This corresponds to the ``backend_authentication_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig` BackendAuthenticationConfig message groups the TrustConfig together with + other settings that control how the load balancer + authenticates, and expresses its identity to, the + backend: + + - trustConfig is the attached TrustConfig. 
+ + \* wellKnownRoots indicates whether the load balance + should trust backend server certificates that are + issued by public certificate authorities, in addition + to certificates trusted by the TrustConfig. + + \* clientCertificate is a client certificate that the + load balancer uses to express its identity to the + backend, if the connection to the backend uses mTLS. + + You can attach the BackendAuthenticationConfig to the + load balancer's BackendService directly determining + how that BackendService negotiates TLS. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + backend_authentication_config, + backend_authentication_config_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, + ): + request = gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backend_authentication_config is not None: + request.backend_authentication_config = backend_authentication_config + if backend_authentication_config_id is not None: + request.backend_authentication_config_id = backend_authentication_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backend_authentication_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_backend_authentication_config.BackendAuthenticationConfig, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_backend_authentication_config( + self, + request: Optional[ + Union[ + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, + dict, + ] + ] = None, + *, + backend_authentication_config: Optional[ + gcn_backend_authentication_config.BackendAuthenticationConfig + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single + BackendAuthenticationConfig to + BackendAuthenticationConfig. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + backend_authentication_config = network_security_v1alpha1.BackendAuthenticationConfig() + backend_authentication_config.name = "name_value" + + request = network_security_v1alpha1.UpdateBackendAuthenticationConfigRequest( + backend_authentication_config=backend_authentication_config, + ) + + # Make the request + operation = client.update_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateBackendAuthenticationConfigRequest, dict]]): + The request object. Request used by + UpdateBackendAuthenticationConfig + method. + backend_authentication_config (:class:`google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig`): + Required. Updated + BackendAuthenticationConfig resource. + + This corresponds to the ``backend_authentication_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the BackendAuthenticationConfig resource + by the update. The fields specified in the update_mask + are relative to the resource, not the full request. A + field will be overwritten if it is in the mask. If the + user does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig` BackendAuthenticationConfig message groups the TrustConfig together with + other settings that control how the load balancer + authenticates, and expresses its identity to, the + backend: + + - trustConfig is the attached TrustConfig. + + \* wellKnownRoots indicates whether the load balance + should trust backend server certificates that are + issued by public certificate authorities, in addition + to certificates trusted by the TrustConfig. 
+ + \* clientCertificate is a client certificate that the + load balancer uses to express its identity to the + backend, if the connection to the backend uses mTLS. + + You can attach the BackendAuthenticationConfig to the + load balancer's BackendService directly determining + how that BackendService negotiates TLS. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backend_authentication_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, + ): + request = gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backend_authentication_config is not None: + request.backend_authentication_config = backend_authentication_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backend_authentication_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "backend_authentication_config.name", + request.backend_authentication_config.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_backend_authentication_config.BackendAuthenticationConfig, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_backend_authentication_config( + self, + request: Optional[ + Union[ + backend_authentication_config.DeleteBackendAuthenticationConfigRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single BackendAuthenticationConfig to + BackendAuthenticationConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteBackendAuthenticationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteBackendAuthenticationConfigRequest, dict]]): + The request object. Request used by the + DeleteBackendAuthenticationConfig + method. + name (:class:`str`): + Required. A name of the BackendAuthenticationConfig to + delete. Must be in the format + ``projects/*/locations/{location}/backendAuthenticationConfigs/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + backend_authentication_config.DeleteBackendAuthenticationConfigRequest, + ): + request = ( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backend_authentication_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
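+ # The routing header assembled below is typically transmitted as the
+ # ``x-goog-request-params`` request metadata key, carrying the resource
+ # ``name`` so the service can route the call.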
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_server_tls_policies( + self, + request: Optional[ + Union[server_tls_policy.ListServerTlsPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListServerTlsPoliciesAsyncPager: + r"""Lists ServerTlsPolicies in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_server_tls_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListServerTlsPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_server_tls_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesRequest, dict]]): + The request object. Request used by the + ListServerTlsPolicies method. + parent (:class:`str`): + Required. The project and location from which the + ServerTlsPolicies should be listed, specified in the + format ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListServerTlsPoliciesAsyncPager: + Response returned by the + ListServerTlsPolicies method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
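+ # In other words, callers should pass either the flattened ``parent``
+ # argument or a full ``request`` object, not both; mixing the two raises
+ # the ValueError below.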
+ flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, server_tls_policy.ListServerTlsPoliciesRequest): + request = server_tls_policy.ListServerTlsPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_server_tls_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListServerTlsPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_server_tls_policy( + self, + request: Optional[ + Union[server_tls_policy.GetServerTlsPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> server_tls_policy.ServerTlsPolicy: + r"""Gets details of a single ServerTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetServerTlsPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_server_tls_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetServerTlsPolicyRequest, dict]]): + The request object. Request used by the + GetServerTlsPolicy method. + name (:class:`str`): + Required. A name of the ServerTlsPolicy to get. Must be + in the format + ``projects/*/locations/{location}/serverTlsPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.ServerTlsPolicy: + ServerTlsPolicy is a resource that specifies how a server should authenticate + incoming requests. This resource itself does not + affect configuration unless it is attached to a + target HTTPS proxy or endpoint config selector + resource. + + ServerTlsPolicy in the form accepted by Application + Load Balancers can be attached only to + TargetHttpsProxy with an EXTERNAL, EXTERNAL_MANAGED + or INTERNAL_MANAGED load balancing scheme. Traffic + Director compatible ServerTlsPolicies can be attached + to EndpointPolicy and TargetHttpsProxy with Traffic + Director INTERNAL_SELF_MANAGED load balancing scheme. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, server_tls_policy.GetServerTlsPolicyRequest): + request = server_tls_policy.GetServerTlsPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_server_tls_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_server_tls_policy( + self, + request: Optional[ + Union[gcn_server_tls_policy.CreateServerTlsPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + server_tls_policy: Optional[gcn_server_tls_policy.ServerTlsPolicy] = None, + server_tls_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new ServerTlsPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + server_tls_policy = network_security_v1alpha1.ServerTlsPolicy() + server_tls_policy.name = "name_value" + + request = network_security_v1alpha1.CreateServerTlsPolicyRequest( + parent="parent_value", + server_tls_policy_id="server_tls_policy_id_value", + server_tls_policy=server_tls_policy, + ) + + # Make the request + operation = client.create_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateServerTlsPolicyRequest, dict]]): + The request object. Request used by the + CreateServerTlsPolicy method. + parent (:class:`str`): + Required. The parent resource of the ServerTlsPolicy. + Must be in the format + ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + server_tls_policy (:class:`google.cloud.network_security_v1alpha1.types.ServerTlsPolicy`): + Required. ServerTlsPolicy resource to + be created. + + This corresponds to the ``server_tls_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + server_tls_policy_id (:class:`str`): + Required. Short name of the ServerTlsPolicy resource to + be created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and + underscores, and should not start with a number. E.g. + "server_mtls_policy". + + This corresponds to the ``server_tls_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ServerTlsPolicy` ServerTlsPolicy is a resource that specifies how a server should authenticate + incoming requests. This resource itself does not + affect configuration unless it is attached to a + target HTTPS proxy or endpoint config selector + resource. + + ServerTlsPolicy in the form accepted by Application + Load Balancers can be attached only to + TargetHttpsProxy with an EXTERNAL, EXTERNAL_MANAGED + or INTERNAL_MANAGED load balancing scheme. Traffic + Director compatible ServerTlsPolicies can be attached + to EndpointPolicy and TargetHttpsProxy with Traffic + Director INTERNAL_SELF_MANAGED load balancing scheme. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, server_tls_policy, server_tls_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_server_tls_policy.CreateServerTlsPolicyRequest): + request = gcn_server_tls_policy.CreateServerTlsPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if server_tls_policy is not None: + request.server_tls_policy = server_tls_policy + if server_tls_policy_id is not None: + request.server_tls_policy_id = server_tls_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_server_tls_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_server_tls_policy.ServerTlsPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_server_tls_policy( + self, + request: Optional[ + Union[gcn_server_tls_policy.UpdateServerTlsPolicyRequest, dict] + ] = None, + *, + server_tls_policy: Optional[gcn_server_tls_policy.ServerTlsPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single ServerTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + server_tls_policy = network_security_v1alpha1.ServerTlsPolicy() + server_tls_policy.name = "name_value" + + request = network_security_v1alpha1.UpdateServerTlsPolicyRequest( + server_tls_policy=server_tls_policy, + ) + + # Make the request + operation = client.update_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateServerTlsPolicyRequest, dict]]): + The request object. Request used by UpdateServerTlsPolicy + method. + server_tls_policy (:class:`google.cloud.network_security_v1alpha1.types.ServerTlsPolicy`): + Required. Updated ServerTlsPolicy + resource. + + This corresponds to the ``server_tls_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the ServerTlsPolicy resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ServerTlsPolicy` ServerTlsPolicy is a resource that specifies how a server should authenticate + incoming requests. This resource itself does not + affect configuration unless it is attached to a + target HTTPS proxy or endpoint config selector + resource. + + ServerTlsPolicy in the form accepted by Application + Load Balancers can be attached only to + TargetHttpsProxy with an EXTERNAL, EXTERNAL_MANAGED + or INTERNAL_MANAGED load balancing scheme. Traffic + Director compatible ServerTlsPolicies can be attached + to EndpointPolicy and TargetHttpsProxy with Traffic + Director INTERNAL_SELF_MANAGED load balancing scheme. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
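+ # For a partial update, a caller would typically supply an explicit mask,
+ # e.g. ``field_mask_pb2.FieldMask(paths=["description"])`` (field name
+ # illustrative); omitting the mask overwrites all fields, as documented above.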
+ flattened_params = [server_tls_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_server_tls_policy.UpdateServerTlsPolicyRequest): + request = gcn_server_tls_policy.UpdateServerTlsPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if server_tls_policy is not None: + request.server_tls_policy = server_tls_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_server_tls_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("server_tls_policy.name", request.server_tls_policy.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_server_tls_policy.ServerTlsPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_server_tls_policy( + self, + request: Optional[ + Union[server_tls_policy.DeleteServerTlsPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single ServerTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteServerTlsPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteServerTlsPolicyRequest, dict]]): + The request object. Request used by the + DeleteServerTlsPolicy method. + name (:class:`str`): + Required. A name of the ServerTlsPolicy to delete. Must + be in the format + ``projects/*/locations/{location}/serverTlsPolicies/*``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, server_tls_policy.DeleteServerTlsPolicyRequest): + request = server_tls_policy.DeleteServerTlsPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_server_tls_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_client_tls_policies( + self, + request: Optional[ + Union[client_tls_policy.ListClientTlsPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListClientTlsPoliciesAsyncPager: + r"""Lists ClientTlsPolicies in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_client_tls_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListClientTlsPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_client_tls_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest, dict]]): + The request object. Request used by the + ListClientTlsPolicies method. + parent (:class:`str`): + Required. The project and location from which the + ClientTlsPolicies should be listed, specified in the + format ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListClientTlsPoliciesAsyncPager: + Response returned by the + ListClientTlsPolicies method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, client_tls_policy.ListClientTlsPoliciesRequest): + request = client_tls_policy.ListClientTlsPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_client_tls_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
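+ # As a usage sketch, ``async for policy in page_result`` (see the sample
+ # above) yields individual ClientTlsPolicy items across pages, while the
+ # pager's ``pages`` attribute iterates one raw list response at a time.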
+ response = pagers.ListClientTlsPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_client_tls_policy( + self, + request: Optional[ + Union[client_tls_policy.GetClientTlsPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> client_tls_policy.ClientTlsPolicy: + r"""Gets details of a single ClientTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_client_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetClientTlsPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_client_tls_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetClientTlsPolicyRequest, dict]]): + The request object. Request used by the + GetClientTlsPolicy method. + name (:class:`str`): + Required. A name of the ClientTlsPolicy to get. Must be + in the format + ``projects/*/locations/{location}/clientTlsPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.ClientTlsPolicy: + ClientTlsPolicy is a resource that + specifies how a client should + authenticate connections to backends of + a service. This resource itself does not + affect configuration unless it is + attached to a backend service resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, client_tls_policy.GetClientTlsPolicyRequest): + request = client_tls_policy.GetClientTlsPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_client_tls_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_client_tls_policy( + self, + request: Optional[ + Union[gcn_client_tls_policy.CreateClientTlsPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + client_tls_policy: Optional[gcn_client_tls_policy.ClientTlsPolicy] = None, + client_tls_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new ClientTlsPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_client_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + client_tls_policy = network_security_v1alpha1.ClientTlsPolicy() + client_tls_policy.name = "name_value" + + request = network_security_v1alpha1.CreateClientTlsPolicyRequest( + parent="parent_value", + client_tls_policy_id="client_tls_policy_id_value", + client_tls_policy=client_tls_policy, + ) + + # Make the request + operation = client.create_client_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateClientTlsPolicyRequest, dict]]): + The request object. Request used by the + CreateClientTlsPolicy method. + parent (:class:`str`): + Required. The parent resource of the ClientTlsPolicy. + Must be in the format + ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + client_tls_policy (:class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy`): + Required. ClientTlsPolicy resource to + be created. + + This corresponds to the ``client_tls_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + client_tls_policy_id (:class:`str`): + Required. Short name of the ClientTlsPolicy resource to + be created. 
This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and + underscores, and should not start with a number. E.g. + "client_mtls_policy". + + This corresponds to the ``client_tls_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy` ClientTlsPolicy is a resource that specifies how a client should authenticate + connections to backends of a service. This resource + itself does not affect configuration unless it is + attached to a backend service resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, client_tls_policy, client_tls_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_client_tls_policy.CreateClientTlsPolicyRequest): + request = gcn_client_tls_policy.CreateClientTlsPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if client_tls_policy is not None: + request.client_tls_policy = client_tls_policy + if client_tls_policy_id is not None: + request.client_tls_policy_id = client_tls_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_client_tls_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_client_tls_policy.ClientTlsPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
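+        # NOTE (editorial sketch, not generated code): the ``retry`` and ``timeout``
+        # parameters accepted by this method can override the defaults per call;
+        # the values below are illustrative only:
+        #
+        #     from google.api_core import retry_async
+        #
+        #     operation = await client.create_client_tls_policy(
+        #         request=request,
+        #         retry=retry_async.AsyncRetry(initial=1.0, maximum=10.0, timeout=60.0),
+        #         timeout=30.0,
+        #     )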
+ return response + + async def update_client_tls_policy( + self, + request: Optional[ + Union[gcn_client_tls_policy.UpdateClientTlsPolicyRequest, dict] + ] = None, + *, + client_tls_policy: Optional[gcn_client_tls_policy.ClientTlsPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single ClientTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_client_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + client_tls_policy = network_security_v1alpha1.ClientTlsPolicy() + client_tls_policy.name = "name_value" + + request = network_security_v1alpha1.UpdateClientTlsPolicyRequest( + client_tls_policy=client_tls_policy, + ) + + # Make the request + operation = client.update_client_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateClientTlsPolicyRequest, dict]]): + The request object. Request used by UpdateClientTlsPolicy + method. + client_tls_policy (:class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy`): + Required. Updated ClientTlsPolicy + resource. + + This corresponds to the ``client_tls_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the ClientTlsPolicy resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy` ClientTlsPolicy is a resource that specifies how a client should authenticate + connections to backends of a service. 
This resource + itself does not affect configuration unless it is + attached to a backend service resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [client_tls_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_client_tls_policy.UpdateClientTlsPolicyRequest): + request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if client_tls_policy is not None: + request.client_tls_policy = client_tls_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_client_tls_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("client_tls_policy.name", request.client_tls_policy.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_client_tls_policy.ClientTlsPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_client_tls_policy( + self, + request: Optional[ + Union[client_tls_policy.DeleteClientTlsPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single ClientTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_client_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteClientTlsPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_client_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteClientTlsPolicyRequest, dict]]): + The request object. Request used by the + DeleteClientTlsPolicy method. + name (:class:`str`): + Required. A name of the ClientTlsPolicy to delete. Must + be in the format + ``projects/*/locations/{location}/clientTlsPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, client_tls_policy.DeleteClientTlsPolicyRequest): + request = client_tls_policy.DeleteClientTlsPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_client_tls_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_gateway_security_policies( + self, + request: Optional[ + Union[gateway_security_policy.ListGatewaySecurityPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGatewaySecurityPoliciesAsyncPager: + r"""Lists GatewaySecurityPolicies in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_gateway_security_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListGatewaySecurityPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gateway_security_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesRequest, dict]]): + The request object. Request used with the + ListGatewaySecurityPolicies method. + parent (:class:`str`): + Required. The project and location from which the + GatewaySecurityPolicies should be listed, specified in + the format ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListGatewaySecurityPoliciesAsyncPager: + Response returned by the + ListGatewaySecurityPolicies method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy.ListGatewaySecurityPoliciesRequest + ): + request = gateway_security_policy.ListGatewaySecurityPoliciesRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_gateway_security_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGatewaySecurityPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_gateway_security_policy( + self, + request: Optional[ + Union[gateway_security_policy.GetGatewaySecurityPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gateway_security_policy.GatewaySecurityPolicy: + r"""Gets details of a single GatewaySecurityPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetGatewaySecurityPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_gateway_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetGatewaySecurityPolicyRequest, dict]]): + The request object. Request used by the + GetGatewaySecurityPolicy method. + name (:class:`str`): + Required. A name of the GatewaySecurityPolicy to get. + Must be in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy: + The GatewaySecurityPolicy resource + contains a collection of + GatewaySecurityPolicyRules and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy.GetGatewaySecurityPolicyRequest + ): + request = gateway_security_policy.GetGatewaySecurityPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_gateway_security_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_gateway_security_policy( + self, + request: Optional[ + Union[gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + gateway_security_policy: Optional[ + gcn_gateway_security_policy.GatewaySecurityPolicy + ] = None, + gateway_security_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new GatewaySecurityPolicy in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + gateway_security_policy = network_security_v1alpha1.GatewaySecurityPolicy() + gateway_security_policy.name = "name_value" + + request = network_security_v1alpha1.CreateGatewaySecurityPolicyRequest( + parent="parent_value", + gateway_security_policy_id="gateway_security_policy_id_value", + gateway_security_policy=gateway_security_policy, + ) + + # Make the request + operation = client.create_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateGatewaySecurityPolicyRequest, dict]]): + The request object. Request used by the + CreateGatewaySecurityPolicy method. + parent (:class:`str`): + Required. The parent resource of the + GatewaySecurityPolicy. Must be in the format + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + gateway_security_policy (:class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy`): + Required. GatewaySecurityPolicy + resource to be created. + + This corresponds to the ``gateway_security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + gateway_security_policy_id (:class:`str`): + Required. Short name of the GatewaySecurityPolicy + resource to be created. This value should be 1-63 + characters long, containing only letters, numbers, + hyphens, and underscores, and should not start with a + number. E.g. "gateway_security_policy1". + + This corresponds to the ``gateway_security_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy` The GatewaySecurityPolicy resource contains a collection of + GatewaySecurityPolicyRules and associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
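+        # NOTE (editorial sketch, not generated code): the flattened ``parent``
+        # argument can be built with the generated ``common_location_path`` helper
+        # instead of formatting the string by hand; the project, location, and id
+        # values below are placeholders:
+        #
+        #     parent = client.common_location_path("my-project", "us-central1")
+        #     operation = await client.create_gateway_security_policy(
+        #         parent=parent,
+        #         gateway_security_policy=gateway_security_policy,
+        #         gateway_security_policy_id="my-policy",
+        #     )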
+ flattened_params = [parent, gateway_security_policy, gateway_security_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest + ): + request = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if gateway_security_policy is not None: + request.gateway_security_policy = gateway_security_policy + if gateway_security_policy_id is not None: + request.gateway_security_policy_id = gateway_security_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_gateway_security_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_gateway_security_policy.GatewaySecurityPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_gateway_security_policy( + self, + request: Optional[ + Union[gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest, dict] + ] = None, + *, + gateway_security_policy: Optional[ + gcn_gateway_security_policy.GatewaySecurityPolicy + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single + GatewaySecurityPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + gateway_security_policy = network_security_v1alpha1.GatewaySecurityPolicy() + gateway_security_policy.name = "name_value" + + request = network_security_v1alpha1.UpdateGatewaySecurityPolicyRequest( + gateway_security_policy=gateway_security_policy, + ) + + # Make the request + operation = client.update_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateGatewaySecurityPolicyRequest, dict]]): + The request object. Request used by the + UpdateGatewaySecurityPolicy method. + gateway_security_policy (:class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy`): + Required. Updated + GatewaySecurityPolicy resource. + + This corresponds to the ``gateway_security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the GatewaySecurityPolicy resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy` The GatewaySecurityPolicy resource contains a collection of + GatewaySecurityPolicyRules and associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [gateway_security_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
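+        # NOTE (editorial sketch, not generated code): per the docstring above,
+        # omitting ``update_mask`` overwrites all fields, so callers typically pass
+        # a FieldMask listing only the fields to change; the field path and the
+        # ``updated_policy`` variable below are hypothetical:
+        #
+        #     from google.protobuf import field_mask_pb2
+        #
+        #     operation = await client.update_gateway_security_policy(
+        #         gateway_security_policy=updated_policy,
+        #         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+        #     )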
+ if not isinstance( + request, gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest + ): + request = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if gateway_security_policy is not None: + request.gateway_security_policy = gateway_security_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_gateway_security_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "gateway_security_policy.name", + request.gateway_security_policy.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_gateway_security_policy.GatewaySecurityPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_gateway_security_policy( + self, + request: Optional[ + Union[gateway_security_policy.DeleteGatewaySecurityPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single GatewaySecurityPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteGatewaySecurityPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteGatewaySecurityPolicyRequest, dict]]): + The request object. Request used by the + DeleteGatewaySecurityPolicy method. + name (:class:`str`): + Required. A name of the GatewaySecurityPolicy to delete. + Must be in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy.DeleteGatewaySecurityPolicyRequest + ): + request = gateway_security_policy.DeleteGatewaySecurityPolicyRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_gateway_security_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_gateway_security_policy_rules( + self, + request: Optional[ + Union[ + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, dict + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGatewaySecurityPolicyRulesAsyncPager: + r"""Lists GatewaySecurityPolicyRules in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_gateway_security_policy_rules(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListGatewaySecurityPolicyRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gateway_security_policy_rules(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesRequest, dict]]): + The request object. Request used with the + ListGatewaySecurityPolicyRules method. + parent (:class:`str`): + Required. The project, location and + GatewaySecurityPolicy from which the + GatewaySecurityPolicyRules should be listed, specified + in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/{gatewaySecurityPolicy}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListGatewaySecurityPolicyRulesAsyncPager: + Response returned by the + ListGatewaySecurityPolicyRules method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest + ): + request = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_gateway_security_policy_rules + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGatewaySecurityPolicyRulesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_gateway_security_policy_rule( + self, + request: Optional[ + Union[ + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest, dict + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gateway_security_policy_rule.GatewaySecurityPolicyRule: + r"""Gets details of a single GatewaySecurityPolicyRule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_gateway_security_policy_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetGatewaySecurityPolicyRuleRequest, dict]]): + The request object. Request used by the + GetGatewaySecurityPolicyRule method. + name (:class:`str`): + Required. The name of the GatewaySecurityPolicyRule to + retrieve. Format: + projects/{project}/location/{location}/gatewaySecurityPolicies/*/rules/* + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule: + The GatewaySecurityPolicyRule + resource is in a nested collection + within a GatewaySecurityPolicy and + represents a traffic matching condition + and associated action to perform. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest + ): + request = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_gateway_security_policy_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_gateway_security_policy_rule( + self, + request: Optional[ + Union[ + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + gateway_security_policy_rule: Optional[ + gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule + ] = None, + gateway_security_policy_rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new GatewaySecurityPolicy in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + gateway_security_policy_rule = network_security_v1alpha1.GatewaySecurityPolicyRule() + gateway_security_policy_rule.basic_profile = "DENY" + gateway_security_policy_rule.name = "name_value" + gateway_security_policy_rule.enabled = True + gateway_security_policy_rule.priority = 898 + gateway_security_policy_rule.session_matcher = "session_matcher_value" + + request = network_security_v1alpha1.CreateGatewaySecurityPolicyRuleRequest( + parent="parent_value", + gateway_security_policy_rule=gateway_security_policy_rule, + ) + + # Make the request + operation = client.create_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateGatewaySecurityPolicyRuleRequest, dict]]): + The request object. Methods for GatewaySecurityPolicy + RULES/GatewaySecurityPolicyRules. + Request used by the + CreateGatewaySecurityPolicyRule method. + parent (:class:`str`): + Required. 
The parent where this rule will be created. + Format : + projects/{project}/location/{location}/gatewaySecurityPolicies/\* + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + gateway_security_policy_rule (:class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule`): + Required. The rule to be created. + This corresponds to the ``gateway_security_policy_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + gateway_security_policy_rule_id (:class:`str`): + The ID to use for the rule, which will become the final + component of the rule's resource name. This value should + be 4-63 characters, and valid characters are + /[a-z][0-9]-/. + + This corresponds to the ``gateway_security_policy_rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule` The GatewaySecurityPolicyRule resource is in a nested collection within a + GatewaySecurityPolicy and represents a traffic + matching condition and associated action to perform. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + gateway_security_policy_rule, + gateway_security_policy_rule_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, + ): + request = ( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if gateway_security_policy_rule is not None: + request.gateway_security_policy_rule = gateway_security_policy_rule + if gateway_security_policy_rule_id is not None: + request.gateway_security_policy_rule_id = gateway_security_policy_rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_gateway_security_policy_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_gateway_security_policy_rule( + self, + request: Optional[ + Union[ + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, + dict, + ] + ] = None, + *, + gateway_security_policy_rule: Optional[ + gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single + GatewaySecurityPolicyRule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + gateway_security_policy_rule = network_security_v1alpha1.GatewaySecurityPolicyRule() + gateway_security_policy_rule.basic_profile = "DENY" + gateway_security_policy_rule.name = "name_value" + gateway_security_policy_rule.enabled = True + gateway_security_policy_rule.priority = 898 + gateway_security_policy_rule.session_matcher = "session_matcher_value" + + request = network_security_v1alpha1.UpdateGatewaySecurityPolicyRuleRequest( + gateway_security_policy_rule=gateway_security_policy_rule, + ) + + # Make the request + operation = client.update_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateGatewaySecurityPolicyRuleRequest, dict]]): + The request object. Request used by the + UpdateGatewaySecurityPolicyRule method. + gateway_security_policy_rule (:class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule`): + Required. Updated + GatewaySecurityPolicyRule resource. + + This corresponds to the ``gateway_security_policy_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the GatewaySecurityPolicy resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule` The GatewaySecurityPolicyRule resource is in a nested collection within a + GatewaySecurityPolicy and represents a traffic + matching condition and associated action to perform. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [gateway_security_policy_rule, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, + ): + request = ( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if gateway_security_policy_rule is not None: + request.gateway_security_policy_rule = gateway_security_policy_rule + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_gateway_security_policy_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "gateway_security_policy_rule.name", + request.gateway_security_policy_rule.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_gateway_security_policy_rule( + self, + request: Optional[ + Union[ + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single GatewaySecurityPolicyRule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteGatewaySecurityPolicyRuleRequest, dict]]): + The request object. Request used by the + DeleteGatewaySecurityPolicyRule method. + name (:class:`str`): + Required. A name of the GatewaySecurityPolicyRule to + delete. Must be in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/{gatewaySecurityPolicy}/rules/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest + ): + request = ( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_gateway_security_policy_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_url_lists( + self, + request: Optional[Union[url_list.ListUrlListsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListUrlListsAsyncPager: + r"""Lists UrlLists in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_url_lists(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListUrlListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_url_lists(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListUrlListsRequest, dict]]): + The request object. Request used by the ListUrlList + method. + parent (:class:`str`): + Required. The project and location from which the + UrlLists should be listed, specified in the format + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListUrlListsAsyncPager: + Response returned by the ListUrlLists + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, url_list.ListUrlListsRequest): + request = url_list.ListUrlListsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_url_lists + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListUrlListsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_url_list( + self, + request: Optional[Union[url_list.GetUrlListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> url_list.UrlList: + r"""Gets details of a single UrlList. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetUrlListRequest( + name="name_value", + ) + + # Make the request + response = await client.get_url_list(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetUrlListRequest, dict]]): + The request object. 
Request used by the GetUrlList + method. + name (:class:`str`): + Required. A name of the UrlList to get. Must be in the + format ``projects/*/locations/{location}/urlLists/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.UrlList: + UrlList proto helps users to set + reusable, independently manageable lists + of hosts, host patterns, URLs, URL + patterns. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, url_list.GetUrlListRequest): + request = url_list.GetUrlListRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_url_list + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_url_list( + self, + request: Optional[Union[gcn_url_list.CreateUrlListRequest, dict]] = None, + *, + parent: Optional[str] = None, + url_list: Optional[gcn_url_list.UrlList] = None, + url_list_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new UrlList in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + url_list = network_security_v1alpha1.UrlList() + url_list.name = "name_value" + url_list.values = ['values_value1', 'values_value2'] + + request = network_security_v1alpha1.CreateUrlListRequest( + parent="parent_value", + url_list_id="url_list_id_value", + url_list=url_list, + ) + + # Make the request + operation = client.create_url_list(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateUrlListRequest, dict]]): + The request object. Request used by the CreateUrlList + method. + parent (:class:`str`): + Required. The parent resource of the UrlList. Must be in + the format ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_list (:class:`google.cloud.network_security_v1alpha1.types.UrlList`): + Required. UrlList resource to be + created. + + This corresponds to the ``url_list`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_list_id (:class:`str`): + Required. Short name of the UrlList resource to be + created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and + underscores, and should not start with a number. E.g. + "url_list". + + This corresponds to the ``url_list_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.UrlList` UrlList proto helps users to set reusable, independently manageable lists + of hosts, host patterns, URLs, URL patterns. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, url_list, url_list_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, gcn_url_list.CreateUrlListRequest): + request = gcn_url_list.CreateUrlListRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if url_list is not None: + request.url_list = url_list + if url_list_id is not None: + request.url_list_id = url_list_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_url_list + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_url_list.UrlList, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_url_list( + self, + request: Optional[Union[gcn_url_list.UpdateUrlListRequest, dict]] = None, + *, + url_list: Optional[gcn_url_list.UrlList] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single UrlList. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + url_list = network_security_v1alpha1.UrlList() + url_list.name = "name_value" + url_list.values = ['values_value1', 'values_value2'] + + request = network_security_v1alpha1.UpdateUrlListRequest( + url_list=url_list, + ) + + # Make the request + operation = client.update_url_list(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateUrlListRequest, dict]]): + The request object. Request used by UpdateUrlList method. + url_list (:class:`google.cloud.network_security_v1alpha1.types.UrlList`): + Required. Updated UrlList resource. + This corresponds to the ``url_list`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the UrlList resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. 
If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.UrlList` UrlList proto helps users to set reusable, independently manageable lists + of hosts, host patterns, URLs, URL patterns. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [url_list, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_url_list.UpdateUrlListRequest): + request = gcn_url_list.UpdateUrlListRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if url_list is not None: + request.url_list = url_list + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_url_list + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("url_list.name", request.url_list.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_url_list.UrlList, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_url_list( + self, + request: Optional[Union[url_list.DeleteUrlListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single UrlList. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteUrlListRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_url_list(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteUrlListRequest, dict]]): + The request object. Request used by the DeleteUrlList + method. + name (:class:`str`): + Required. A name of the UrlList to delete. Must be in + the format + ``projects/*/locations/{location}/urlLists/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, url_list.DeleteUrlListRequest): + request = url_list.DeleteUrlListRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_url_list + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_tls_inspection_policies( + self, + request: Optional[ + Union[tls_inspection_policy.ListTlsInspectionPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTlsInspectionPoliciesAsyncPager: + r"""Lists TlsInspectionPolicies in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_tls_inspection_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListTlsInspectionPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tls_inspection_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesRequest, dict]]): + The request object. Request used with the + ListTlsInspectionPolicies method. + parent (:class:`str`): + Required. The project and location from which the + TlsInspectionPolicies should be listed, specified in the + format ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListTlsInspectionPoliciesAsyncPager: + Response returned by the + ListTlsInspectionPolicies method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, tls_inspection_policy.ListTlsInspectionPoliciesRequest + ): + request = tls_inspection_policy.ListTlsInspectionPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_tls_inspection_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTlsInspectionPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_tls_inspection_policy( + self, + request: Optional[ + Union[tls_inspection_policy.GetTlsInspectionPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tls_inspection_policy.TlsInspectionPolicy: + r"""Gets details of a single TlsInspectionPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetTlsInspectionPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tls_inspection_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetTlsInspectionPolicyRequest, dict]]): + The request object. Request used by the + GetTlsInspectionPolicy method. + name (:class:`str`): + Required. A name of the TlsInspectionPolicy to get. Must + be in the format + ``projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy: + The TlsInspectionPolicy resource + contains references to CA pools in + Certificate Authority Service and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, tls_inspection_policy.GetTlsInspectionPolicyRequest): + request = tls_inspection_policy.GetTlsInspectionPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_tls_inspection_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_tls_inspection_policy( + self, + request: Optional[ + Union[gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + tls_inspection_policy: Optional[ + gcn_tls_inspection_policy.TlsInspectionPolicy + ] = None, + tls_inspection_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new TlsInspectionPolicy in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + tls_inspection_policy = network_security_v1alpha1.TlsInspectionPolicy() + tls_inspection_policy.name = "name_value" + tls_inspection_policy.ca_pool = "ca_pool_value" + + request = network_security_v1alpha1.CreateTlsInspectionPolicyRequest( + parent="parent_value", + tls_inspection_policy_id="tls_inspection_policy_id_value", + tls_inspection_policy=tls_inspection_policy, + ) + + # Make the request + operation = client.create_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateTlsInspectionPolicyRequest, dict]]): + The request object. Request used by the + CreateTlsInspectionPolicy method. + parent (:class:`str`): + Required. The parent resource of the + TlsInspectionPolicy. Must be in the format + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tls_inspection_policy (:class:`google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy`): + Required. TlsInspectionPolicy + resource to be created. + + This corresponds to the ``tls_inspection_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tls_inspection_policy_id (:class:`str`): + Required. Short name of the TlsInspectionPolicy resource + to be created. This value should be 1-63 characters + long, containing only letters, numbers, hyphens, and + underscores, and should not start with a number. E.g. + "tls_inspection_policy1". + + This corresponds to the ``tls_inspection_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy` The TlsInspectionPolicy resource contains references to CA pools in + Certificate Authority Service and associated + metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent, tls_inspection_policy, tls_inspection_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest + ): + request = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tls_inspection_policy is not None: + request.tls_inspection_policy = tls_inspection_policy + if tls_inspection_policy_id is not None: + request.tls_inspection_policy_id = tls_inspection_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_tls_inspection_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_tls_inspection_policy.TlsInspectionPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_tls_inspection_policy( + self, + request: Optional[ + Union[gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest, dict] + ] = None, + *, + tls_inspection_policy: Optional[ + gcn_tls_inspection_policy.TlsInspectionPolicy + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single + TlsInspectionPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + tls_inspection_policy = network_security_v1alpha1.TlsInspectionPolicy() + tls_inspection_policy.name = "name_value" + tls_inspection_policy.ca_pool = "ca_pool_value" + + request = network_security_v1alpha1.UpdateTlsInspectionPolicyRequest( + tls_inspection_policy=tls_inspection_policy, + ) + + # Make the request + operation = client.update_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateTlsInspectionPolicyRequest, dict]]): + The request object. Request used by the + UpdateTlsInspectionPolicy method. + tls_inspection_policy (:class:`google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy`): + Required. Updated TlsInspectionPolicy + resource. + + This corresponds to the ``tls_inspection_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the TlsInspectionPolicy resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy` The TlsInspectionPolicy resource contains references to CA pools in + Certificate Authority Service and associated + metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [tls_inspection_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest + ): + request = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tls_inspection_policy is not None: + request.tls_inspection_policy = tls_inspection_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_tls_inspection_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("tls_inspection_policy.name", request.tls_inspection_policy.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_tls_inspection_policy.TlsInspectionPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_tls_inspection_policy( + self, + request: Optional[ + Union[tls_inspection_policy.DeleteTlsInspectionPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single TlsInspectionPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteTlsInspectionPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteTlsInspectionPolicyRequest, dict]]): + The request object. Request used by the + DeleteTlsInspectionPolicy method. + name (:class:`str`): + Required. A name of the TlsInspectionPolicy to delete. + Must be in the format + ``projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, tls_inspection_policy.DeleteTlsInspectionPolicyRequest + ): + request = tls_inspection_policy.DeleteTlsInspectionPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_tls_inspection_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_authz_policies( + self, + request: Optional[Union[authz_policy.ListAuthzPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAuthzPoliciesAsyncPager: + r"""Lists AuthzPolicies in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_authz_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListAuthzPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_authz_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesRequest, dict]]): + The request object. Message for requesting list of ``AuthzPolicy`` + resources. + parent (:class:`str`): + Required. The project and location from which the + ``AuthzPolicy`` resources are listed, specified in the + following format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListAuthzPoliciesAsyncPager: + Message for response to listing AuthzPolicy resources. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, authz_policy.ListAuthzPoliciesRequest): + request = authz_policy.ListAuthzPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_authz_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListAuthzPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_authz_policy( + self, + request: Optional[Union[authz_policy.GetAuthzPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> authz_policy.AuthzPolicy: + r"""Gets details of a single AuthzPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetAuthzPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_authz_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetAuthzPolicyRequest, dict]]): + The request object. Message for getting a ``AuthzPolicy`` resource. + name (:class:`str`): + Required. A name of the ``AuthzPolicy`` resource to get. + Must be in the format + ``projects/{project}/locations/{location}/authzPolicies/{authz_policy}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.AuthzPolicy: + AuthzPolicy is a resource that allows to forward traffic to a + callout backend designed to scan the traffic for + security purposes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, authz_policy.GetAuthzPolicyRequest): + request = authz_policy.GetAuthzPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_authz_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_authz_policy( + self, + request: Optional[ + Union[gcn_authz_policy.CreateAuthzPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + authz_policy: Optional[gcn_authz_policy.AuthzPolicy] = None, + authz_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new AuthzPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + authz_policy = network_security_v1alpha1.AuthzPolicy() + authz_policy.name = "name_value" + authz_policy.target.load_balancing_scheme = "INTERNAL_SELF_MANAGED" + authz_policy.target.resources = ['resources_value1', 'resources_value2'] + authz_policy.action = "CUSTOM" + + request = network_security_v1alpha1.CreateAuthzPolicyRequest( + parent="parent_value", + authz_policy_id="authz_policy_id_value", + authz_policy=authz_policy, + ) + + # Make the request + operation = client.create_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateAuthzPolicyRequest, dict]]): + The request object. Message for creating an ``AuthzPolicy`` resource. + parent (:class:`str`): + Required. The parent resource of the ``AuthzPolicy`` + resource. Must be in the format + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authz_policy (:class:`google.cloud.network_security_v1alpha1.types.AuthzPolicy`): + Required. ``AuthzPolicy`` resource to be created. + This corresponds to the ``authz_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authz_policy_id (:class:`str`): + Required. User-provided ID of the ``AuthzPolicy`` + resource to be created. + + This corresponds to the ``authz_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.AuthzPolicy` AuthzPolicy is a resource that allows to forward traffic to a + callout backend designed to scan the traffic for + security purposes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, authz_policy, authz_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_authz_policy.CreateAuthzPolicyRequest): + request = gcn_authz_policy.CreateAuthzPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if authz_policy is not None: + request.authz_policy = authz_policy + if authz_policy_id is not None: + request.authz_policy_id = authz_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_authz_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_authz_policy.AuthzPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_authz_policy( + self, + request: Optional[ + Union[gcn_authz_policy.UpdateAuthzPolicyRequest, dict] + ] = None, + *, + authz_policy: Optional[gcn_authz_policy.AuthzPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single AuthzPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + authz_policy = network_security_v1alpha1.AuthzPolicy() + authz_policy.name = "name_value" + authz_policy.target.load_balancing_scheme = "INTERNAL_SELF_MANAGED" + authz_policy.target.resources = ['resources_value1', 'resources_value2'] + authz_policy.action = "CUSTOM" + + request = network_security_v1alpha1.UpdateAuthzPolicyRequest( + authz_policy=authz_policy, + ) + + # Make the request + operation = client.update_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateAuthzPolicyRequest, dict]]): + The request object. Message for updating an ``AuthzPolicy`` resource. + authz_policy (:class:`google.cloud.network_security_v1alpha1.types.AuthzPolicy`): + Required. ``AuthzPolicy`` resource being updated. + This corresponds to the ``authz_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Used to specify the fields to be overwritten + in the ``AuthzPolicy`` resource by the update. The + fields specified in the ``update_mask`` are relative to + the resource, not the full request. A field is + overwritten if it is in the mask. If the user does not + specify a mask, then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.AuthzPolicy` AuthzPolicy is a resource that allows to forward traffic to a + callout backend designed to scan the traffic for + security purposes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [authz_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, gcn_authz_policy.UpdateAuthzPolicyRequest): + request = gcn_authz_policy.UpdateAuthzPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if authz_policy is not None: + request.authz_policy = authz_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_authz_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("authz_policy.name", request.authz_policy.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_authz_policy.AuthzPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_authz_policy( + self, + request: Optional[Union[authz_policy.DeleteAuthzPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single AuthzPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteAuthzPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteAuthzPolicyRequest, dict]]): + The request object. Message for deleting an ``AuthzPolicy`` resource. + name (:class:`str`): + Required. The name of the ``AuthzPolicy`` resource to + delete. Must be in the format + ``projects/{project}/locations/{location}/authzPolicies/{authz_policy}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, authz_policy.DeleteAuthzPolicyRequest): + request = authz_policy.DeleteAuthzPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_authz_policy ] # Certain fields should be provided within the metadata header; diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/client.py index 5e38e7288352..34754c47a2cc 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/client.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/client.py @@ -72,11 +72,42 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.network_security_v1alpha1.services.network_security import pagers +from google.cloud.network_security_v1alpha1.types import ( + authorization_policy as gcn_authorization_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + authz_policy as gcn_authz_policy, +) +from google.cloud.network_security_v1alpha1.types import backend_authentication_config +from google.cloud.network_security_v1alpha1.types import ( + backend_authentication_config as gcn_backend_authentication_config, +) from google.cloud.network_security_v1alpha1.types import ( client_tls_policy as gcn_client_tls_policy, ) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy as gcn_gateway_security_policy, +) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy_rule +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy_rule as gcn_gateway_security_policy_rule, +) +from google.cloud.network_security_v1alpha1.types import ( + server_tls_policy as gcn_server_tls_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + tls_inspection_policy as gcn_tls_inspection_policy, +) +from 
google.cloud.network_security_v1alpha1.types import url_list as gcn_url_list
+from google.cloud.network_security_v1alpha1.types import authorization_policy
+from google.cloud.network_security_v1alpha1.types import authz_policy
 from google.cloud.network_security_v1alpha1.types import client_tls_policy
-from google.cloud.network_security_v1alpha1.types import common, tls
+from google.cloud.network_security_v1alpha1.types import common
+from google.cloud.network_security_v1alpha1.types import server_tls_policy
+from google.cloud.network_security_v1alpha1.types import tls
+from google.cloud.network_security_v1alpha1.types import tls_inspection_policy
+from google.cloud.network_security_v1alpha1.types import url_list
 from .transports.base import DEFAULT_CLIENT_INFO, NetworkSecurityTransport
 from .transports.grpc import NetworkSecurityGrpcTransport
@@ -213,6 +244,118 @@ def transport(self) -> NetworkSecurityTransport:
         """
         return self._transport
 
+    @staticmethod
+    def authorization_policy_path(
+        project: str,
+        location: str,
+        authorization_policy: str,
+    ) -> str:
+        """Returns a fully-qualified authorization_policy string."""
+        return "projects/{project}/locations/{location}/authorizationPolicies/{authorization_policy}".format(
+            project=project,
+            location=location,
+            authorization_policy=authorization_policy,
+        )
+
+    @staticmethod
+    def parse_authorization_policy_path(path: str) -> Dict[str, str]:
+        """Parses a authorization_policy path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/authorizationPolicies/(?P<authorization_policy>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def authz_policy_path(
+        project: str,
+        location: str,
+        authz_policy: str,
+    ) -> str:
+        """Returns a fully-qualified authz_policy string."""
+        return "projects/{project}/locations/{location}/authzPolicies/{authz_policy}".format(
+            project=project,
+            location=location,
+            authz_policy=authz_policy,
+        )
+
+    @staticmethod
+    def parse_authz_policy_path(path: str) -> Dict[str, str]:
+        """Parses a authz_policy path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/authzPolicies/(?P<authz_policy>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def backend_authentication_config_path(
+        project: str,
+        location: str,
+        backend_authentication_config: str,
+    ) -> str:
+        """Returns a fully-qualified backend_authentication_config string."""
+        return "projects/{project}/locations/{location}/backendAuthenticationConfigs/{backend_authentication_config}".format(
+            project=project,
+            location=location,
+            backend_authentication_config=backend_authentication_config,
+        )
+
+    @staticmethod
+    def parse_backend_authentication_config_path(path: str) -> Dict[str, str]:
+        """Parses a backend_authentication_config path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backendAuthenticationConfigs/(?P<backend_authentication_config>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def ca_pool_path(
+        project: str,
+        location: str,
+        ca_pool: str,
+    ) -> str:
+        """Returns a fully-qualified ca_pool string."""
+        return "projects/{project}/locations/{location}/caPools/{ca_pool}".format(
+            project=project,
+            location=location,
+            ca_pool=ca_pool,
+        )
+
+    @staticmethod
+    def parse_ca_pool_path(path: str) -> Dict[str, str]:
+        """Parses a ca_pool path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/caPools/(?P<ca_pool>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def certificate_path(
+        project: str,
+        location: str,
+        certificate: str,
+    ) -> str:
+        """Returns a fully-qualified certificate string."""
+        return (
+            "projects/{project}/locations/{location}/certificates/{certificate}".format(
+                project=project,
+                location=location,
+                certificate=certificate,
+            )
+        )
+
+    @staticmethod
+    def parse_certificate_path(path: str) -> Dict[str, str]:
+        """Parses a certificate path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/certificates/(?P<certificate>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def client_tls_policy_path(
         project: str,
@@ -235,6 +378,140 @@ def parse_client_tls_policy_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def gateway_security_policy_path(
+        project: str,
+        location: str,
+        gateway_security_policy: str,
+    ) -> str:
+        """Returns a fully-qualified gateway_security_policy string."""
+        return "projects/{project}/locations/{location}/gatewaySecurityPolicies/{gateway_security_policy}".format(
+            project=project,
+            location=location,
+            gateway_security_policy=gateway_security_policy,
+        )
+
+    @staticmethod
+    def parse_gateway_security_policy_path(path: str) -> Dict[str, str]:
+        """Parses a gateway_security_policy path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/gatewaySecurityPolicies/(?P<gateway_security_policy>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def gateway_security_policy_rule_path(
+        project: str,
+        location: str,
+        gateway_security_policy: str,
+        rule: str,
+    ) -> str:
+        """Returns a fully-qualified gateway_security_policy_rule string."""
+        return "projects/{project}/locations/{location}/gatewaySecurityPolicies/{gateway_security_policy}/rules/{rule}".format(
+            project=project,
+            location=location,
+            gateway_security_policy=gateway_security_policy,
+            rule=rule,
+        )
+
+    @staticmethod
+    def parse_gateway_security_policy_rule_path(path: str) -> Dict[str, str]:
+        """Parses a gateway_security_policy_rule path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/gatewaySecurityPolicies/(?P<gateway_security_policy>.+?)/rules/(?P<rule>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def server_tls_policy_path(
+        project: str,
+        location: str,
+        server_tls_policy: str,
+    ) -> str:
+        """Returns a fully-qualified server_tls_policy string."""
+        return "projects/{project}/locations/{location}/serverTlsPolicies/{server_tls_policy}".format(
+            project=project,
+            location=location,
+            server_tls_policy=server_tls_policy,
+        )
+
+    @staticmethod
+    def parse_server_tls_policy_path(path: str) -> Dict[str, str]:
+        """Parses a server_tls_policy path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/serverTlsPolicies/(?P<server_tls_policy>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def tls_inspection_policy_path(
+        project: str,
+        location: str,
+        tls_inspection_policy: str,
+    ) -> str:
+        """Returns a fully-qualified tls_inspection_policy string."""
+        return "projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}".format(
+            project=project,
+            location=location,
+            tls_inspection_policy=tls_inspection_policy,
+        )
+
+    @staticmethod
+    def parse_tls_inspection_policy_path(path: str) -> Dict[str, str]:
+        """Parses a tls_inspection_policy path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/tlsInspectionPolicies/(?P<tls_inspection_policy>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def trust_config_path(
+        project: str,
+        location: str,
+        trust_config: str,
+    ) -> str:
+        """Returns a fully-qualified trust_config string."""
+        return "projects/{project}/locations/{location}/trustConfigs/{trust_config}".format(
+            project=project,
+            location=location,
+            trust_config=trust_config,
+        )
+
+    @staticmethod
+    def parse_trust_config_path(path: str) -> Dict[str, str]:
+        """Parses a trust_config path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/trustConfigs/(?P<trust_config>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def url_list_path(
+        project: str,
+        location: str,
+        url_list: str,
+    ) -> str:
+        """Returns a fully-qualified url_list string."""
+        return "projects/{project}/locations/{location}/urlLists/{url_list}".format(
+            project=project,
+            location=location,
+            url_list=url_list,
+        )
+
+    @staticmethod
+    def parse_url_list_path(path: str) -> Dict[str, str]:
+        """Parses a url_list path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/urlLists/(?P<url_list>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def common_billing_account_path(
         billing_account: str,
@@ -729,18 +1006,18 @@ def __init__(
             },
         )
 
-    def list_client_tls_policies(
+    def list_authorization_policies(
         self,
         request: Optional[
-            Union[client_tls_policy.ListClientTlsPoliciesRequest, dict]
+            Union[authorization_policy.ListAuthorizationPoliciesRequest, dict]
         ] = None,
         *,
         parent: Optional[str] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
         timeout: Union[float, object] = gapic_v1.method.DEFAULT,
         metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> pagers.ListClientTlsPoliciesPager:
-        r"""Lists ClientTlsPolicies in a given project and
+    ) -> pagers.ListAuthorizationPoliciesPager:
+        r"""Lists AuthorizationPolicies in a given project and
         location.
 
         .. code-block:: python
@@ -754,30 +1031,30 @@ def list_client_tls_policies(
             # https://googleapis.dev/python/google-api-core/latest/client_options.html
             from google.cloud import network_security_v1alpha1
 
-            def sample_list_client_tls_policies():
+            def sample_list_authorization_policies():
                 # Create a client
                 client = network_security_v1alpha1.NetworkSecurityClient()
 
                 # Initialize request argument(s)
-                request = network_security_v1alpha1.ListClientTlsPoliciesRequest(
+                request = network_security_v1alpha1.ListAuthorizationPoliciesRequest(
                     parent="parent_value",
                 )
 
                 # Make the request
-                page_result = client.list_client_tls_policies(request=request)
+                page_result = client.list_authorization_policies(request=request)
 
                 # Handle the response
                 for response in page_result:
                     print(response)
 
         Args:
-            request (Union[google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest, dict]):
-                The request object. Request used by the
-                ListClientTlsPolicies method.
+            request (Union[google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesRequest, dict]):
+                The request object. Request used with the
+                ListAuthorizationPolicies method.
             parent (str):
                 Required. The project and location from which the
-                ClientTlsPolicies should be listed, specified in the
-                format ``projects/*/locations/{location}``.
+                AuthorizationPolicies should be listed, specified in the
+                format ``projects/{project}/locations/{location}``.
 
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
             metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                 sent along with the request as metadata. Normally, each value must be of type `str`,
                 but for metadata keys ending with the suffix `-bin`, the corresponding values must
@@ -791,9 +1068,9 @@ def sample_list_client_tls_policies():
                 be of type `bytes`.
Returns: - google.cloud.network_security_v1alpha1.services.network_security.pagers.ListClientTlsPoliciesPager: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListAuthorizationPoliciesPager: Response returned by the - ListClientTlsPolicies method. + ListAuthorizationPolicies method. Iterating over this object will yield results and resolve additional pages automatically. @@ -814,8 +1091,10 @@ def sample_list_client_tls_policies(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, client_tls_policy.ListClientTlsPoliciesRequest): - request = client_tls_policy.ListClientTlsPoliciesRequest(request) + if not isinstance( + request, authorization_policy.ListAuthorizationPoliciesRequest + ): + request = authorization_policy.ListAuthorizationPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -823,7 +1102,9 @@ def sample_list_client_tls_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_client_tls_policies] + rpc = self._transport._wrapped_methods[ + self._transport.list_authorization_policies + ] # Certain fields should be provided within the metadata header; # add these here. @@ -844,7 +1125,7 @@ def sample_list_client_tls_policies(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListClientTlsPoliciesPager( + response = pagers.ListAuthorizationPoliciesPager( method=rpc, request=request, response=response, @@ -856,18 +1137,18 @@ def sample_list_client_tls_policies(): # Done; return the response. return response - def get_client_tls_policy( + def get_authorization_policy( self, request: Optional[ - Union[client_tls_policy.GetClientTlsPolicyRequest, dict] + Union[authorization_policy.GetAuthorizationPolicyRequest, dict] ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> client_tls_policy.ClientTlsPolicy: - r"""Gets details of a single ClientTlsPolicy. + ) -> authorization_policy.AuthorizationPolicy: + r"""Gets details of a single AuthorizationPolicy. .. code-block:: python @@ -880,29 +1161,29 @@ def get_client_tls_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import network_security_v1alpha1 - def sample_get_client_tls_policy(): + def sample_get_authorization_policy(): # Create a client client = network_security_v1alpha1.NetworkSecurityClient() # Initialize request argument(s) - request = network_security_v1alpha1.GetClientTlsPolicyRequest( + request = network_security_v1alpha1.GetAuthorizationPolicyRequest( name="name_value", ) # Make the request - response = client.get_client_tls_policy(request=request) + response = client.get_authorization_policy(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.network_security_v1alpha1.types.GetClientTlsPolicyRequest, dict]): + request (Union[google.cloud.network_security_v1alpha1.types.GetAuthorizationPolicyRequest, dict]): The request object. Request used by the - GetClientTlsPolicy method. + GetAuthorizationPolicy method. name (str): - Required. A name of the ClientTlsPolicy to get. 
Must be - in the format - ``projects/*/locations/{location}/clientTlsPolicies/*``. + Required. A name of the AuthorizationPolicy to get. Must + be in the format + ``projects/{project}/locations/{location}/authorizationPolicies/*``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -916,13 +1197,14 @@ def sample_get_client_tls_policy(): be of type `bytes`. Returns: - google.cloud.network_security_v1alpha1.types.ClientTlsPolicy: - ClientTlsPolicy is a resource that - specifies how a client should - authenticate connections to backends of - a service. This resource itself does not - affect configuration unless it is - attached to a backend service resource. + google.cloud.network_security_v1alpha1.types.AuthorizationPolicy: + AuthorizationPolicy is a resource + that specifies how a server should + authorize incoming connections. This + resource in itself does not change the + configuration unless it's attached to a + target https proxy or endpoint config + selector resource. """ # Create or coerce a protobuf request object. @@ -940,8 +1222,8 @@ def sample_get_client_tls_policy(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, client_tls_policy.GetClientTlsPolicyRequest): - request = client_tls_policy.GetClientTlsPolicyRequest(request) + if not isinstance(request, authorization_policy.GetAuthorizationPolicyRequest): + request = authorization_policy.GetAuthorizationPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -949,7 +1231,7 @@ def sample_get_client_tls_policy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_client_tls_policy] + rpc = self._transport._wrapped_methods[self._transport.get_authorization_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -971,21 +1253,23 @@ def sample_get_client_tls_policy(): # Done; return the response. return response - def create_client_tls_policy( + def create_authorization_policy( self, request: Optional[ - Union[gcn_client_tls_policy.CreateClientTlsPolicyRequest, dict] + Union[gcn_authorization_policy.CreateAuthorizationPolicyRequest, dict] ] = None, *, parent: Optional[str] = None, - client_tls_policy: Optional[gcn_client_tls_policy.ClientTlsPolicy] = None, - client_tls_policy_id: Optional[str] = None, + authorization_policy: Optional[ + gcn_authorization_policy.AuthorizationPolicy + ] = None, + authorization_policy_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: - r"""Creates a new ClientTlsPolicy in a given project and - location. + r"""Creates a new AuthorizationPolicy in a given project + and location. .. 
code-block:: python @@ -998,22 +1282,23 @@ def create_client_tls_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import network_security_v1alpha1 - def sample_create_client_tls_policy(): + def sample_create_authorization_policy(): # Create a client client = network_security_v1alpha1.NetworkSecurityClient() # Initialize request argument(s) - client_tls_policy = network_security_v1alpha1.ClientTlsPolicy() - client_tls_policy.name = "name_value" + authorization_policy = network_security_v1alpha1.AuthorizationPolicy() + authorization_policy.name = "name_value" + authorization_policy.action = "DENY" - request = network_security_v1alpha1.CreateClientTlsPolicyRequest( + request = network_security_v1alpha1.CreateAuthorizationPolicyRequest( parent="parent_value", - client_tls_policy_id="client_tls_policy_id_value", - client_tls_policy=client_tls_policy, + authorization_policy_id="authorization_policy_id_value", + authorization_policy=authorization_policy, ) # Make the request - operation = client.create_client_tls_policy(request=request) + operation = client.create_authorization_policy(request=request) print("Waiting for operation to complete...") @@ -1023,32 +1308,32 @@ def sample_create_client_tls_policy(): print(response) Args: - request (Union[google.cloud.network_security_v1alpha1.types.CreateClientTlsPolicyRequest, dict]): + request (Union[google.cloud.network_security_v1alpha1.types.CreateAuthorizationPolicyRequest, dict]): The request object. Request used by the - CreateClientTlsPolicy method. + CreateAuthorizationPolicy method. parent (str): - Required. The parent resource of the ClientTlsPolicy. - Must be in the format - ``projects/*/locations/{location}``. + Required. The parent resource of the + AuthorizationPolicy. Must be in the format + ``projects/{project}/locations/{location}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - client_tls_policy (google.cloud.network_security_v1alpha1.types.ClientTlsPolicy): - Required. ClientTlsPolicy resource to - be created. + authorization_policy (google.cloud.network_security_v1alpha1.types.AuthorizationPolicy): + Required. AuthorizationPolicy + resource to be created. - This corresponds to the ``client_tls_policy`` field + This corresponds to the ``authorization_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - client_tls_policy_id (str): - Required. Short name of the ClientTlsPolicy resource to - be created. This value should be 1-63 characters long, - containing only letters, numbers, hyphens, and + authorization_policy_id (str): + Required. Short name of the AuthorizationPolicy resource + to be created. This value should be 1-63 characters + long, containing only letters, numbers, hyphens, and underscores, and should not start with a number. E.g. - "client_mtls_policy". + "authz_policy". - This corresponds to the ``client_tls_policy_id`` field + This corresponds to the ``authorization_policy_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1063,16 +1348,17 @@ def sample_create_client_tls_policy(): google.api_core.operation.Operation: An object representing a long-running operation. 
- The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy` ClientTlsPolicy is a resource that specifies how a client should authenticate - connections to backends of a service. This resource - itself does not affect configuration unless it is - attached to a backend service resource. + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.AuthorizationPolicy` AuthorizationPolicy is a resource that specifies how a server + should authorize incoming connections. This resource + in itself does not change the configuration unless + it's attached to a target https proxy or endpoint + config selector resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [parent, client_tls_policy, client_tls_policy_id] + flattened_params = [parent, authorization_policy, authorization_policy_id] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -1084,20 +1370,24 @@ def sample_create_client_tls_policy(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, gcn_client_tls_policy.CreateClientTlsPolicyRequest): - request = gcn_client_tls_policy.CreateClientTlsPolicyRequest(request) + if not isinstance( + request, gcn_authorization_policy.CreateAuthorizationPolicyRequest + ): + request = gcn_authorization_policy.CreateAuthorizationPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent - if client_tls_policy is not None: - request.client_tls_policy = client_tls_policy - if client_tls_policy_id is not None: - request.client_tls_policy_id = client_tls_policy_id + if authorization_policy is not None: + request.authorization_policy = authorization_policy + if authorization_policy_id is not None: + request.authorization_policy_id = authorization_policy_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_client_tls_policy] + rpc = self._transport._wrapped_methods[ + self._transport.create_authorization_policy + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1120,26 +1410,29 @@ def sample_create_client_tls_policy(): response = operation.from_gapic( response, self._transport.operations_client, - gcn_client_tls_policy.ClientTlsPolicy, + gcn_authorization_policy.AuthorizationPolicy, metadata_type=common.OperationMetadata, ) # Done; return the response. return response - def update_client_tls_policy( + def update_authorization_policy( self, request: Optional[ - Union[gcn_client_tls_policy.UpdateClientTlsPolicyRequest, dict] + Union[gcn_authorization_policy.UpdateAuthorizationPolicyRequest, dict] ] = None, *, - client_tls_policy: Optional[gcn_client_tls_policy.ClientTlsPolicy] = None, + authorization_policy: Optional[ + gcn_authorization_policy.AuthorizationPolicy + ] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: - r"""Updates the parameters of a single ClientTlsPolicy. 
+ r"""Updates the parameters of a single + AuthorizationPolicy. .. code-block:: python @@ -1152,20 +1445,21 @@ def update_client_tls_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import network_security_v1alpha1 - def sample_update_client_tls_policy(): + def sample_update_authorization_policy(): # Create a client client = network_security_v1alpha1.NetworkSecurityClient() # Initialize request argument(s) - client_tls_policy = network_security_v1alpha1.ClientTlsPolicy() - client_tls_policy.name = "name_value" + authorization_policy = network_security_v1alpha1.AuthorizationPolicy() + authorization_policy.name = "name_value" + authorization_policy.action = "DENY" - request = network_security_v1alpha1.UpdateClientTlsPolicyRequest( - client_tls_policy=client_tls_policy, + request = network_security_v1alpha1.UpdateAuthorizationPolicyRequest( + authorization_policy=authorization_policy, ) # Make the request - operation = client.update_client_tls_policy(request=request) + operation = client.update_authorization_policy(request=request) print("Waiting for operation to complete...") @@ -1175,19 +1469,19 @@ def sample_update_client_tls_policy(): print(response) Args: - request (Union[google.cloud.network_security_v1alpha1.types.UpdateClientTlsPolicyRequest, dict]): - The request object. Request used by UpdateClientTlsPolicy - method. - client_tls_policy (google.cloud.network_security_v1alpha1.types.ClientTlsPolicy): - Required. Updated ClientTlsPolicy + request (Union[google.cloud.network_security_v1alpha1.types.UpdateAuthorizationPolicyRequest, dict]): + The request object. Request used by the + UpdateAuthorizationPolicy method. + authorization_policy (google.cloud.network_security_v1alpha1.types.AuthorizationPolicy): + Required. Updated AuthorizationPolicy resource. - This corresponds to the ``client_tls_policy`` field + This corresponds to the ``authorization_policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask is used to specify the fields to be - overwritten in the ClientTlsPolicy resource by the + overwritten in the AuthorizationPolicy resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it is in the mask. If the user @@ -1209,16 +1503,17 @@ def sample_update_client_tls_policy(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy` ClientTlsPolicy is a resource that specifies how a client should authenticate - connections to backends of a service. This resource - itself does not affect configuration unless it is - attached to a backend service resource. + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.AuthorizationPolicy` AuthorizationPolicy is a resource that specifies how a server + should authorize incoming connections. This resource + in itself does not change the configuration unless + it's attached to a target https proxy or endpoint + config selector resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- flattened_params = [client_tls_policy, update_mask] + flattened_params = [authorization_policy, update_mask] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -1230,24 +1525,28 @@ def sample_update_client_tls_policy(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, gcn_client_tls_policy.UpdateClientTlsPolicyRequest): - request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest(request) + if not isinstance( + request, gcn_authorization_policy.UpdateAuthorizationPolicyRequest + ): + request = gcn_authorization_policy.UpdateAuthorizationPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if client_tls_policy is not None: - request.client_tls_policy = client_tls_policy + if authorization_policy is not None: + request.authorization_policy = authorization_policy if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_client_tls_policy] + rpc = self._transport._wrapped_methods[ + self._transport.update_authorization_policy + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("client_tls_policy.name", request.client_tls_policy.name),) + (("authorization_policy.name", request.authorization_policy.name),) ), ) @@ -1266,17 +1565,17 @@ def sample_update_client_tls_policy(): response = operation.from_gapic( response, self._transport.operations_client, - gcn_client_tls_policy.ClientTlsPolicy, + gcn_authorization_policy.AuthorizationPolicy, metadata_type=common.OperationMetadata, ) # Done; return the response. return response - def delete_client_tls_policy( + def delete_authorization_policy( self, request: Optional[ - Union[client_tls_policy.DeleteClientTlsPolicyRequest, dict] + Union[authorization_policy.DeleteAuthorizationPolicyRequest, dict] ] = None, *, name: Optional[str] = None, @@ -1284,7 +1583,7 @@ def delete_client_tls_policy( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: - r"""Deletes a single ClientTlsPolicy. + r"""Deletes a single AuthorizationPolicy. .. code-block:: python @@ -1297,17 +1596,17 @@ def delete_client_tls_policy( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import network_security_v1alpha1 - def sample_delete_client_tls_policy(): + def sample_delete_authorization_policy(): # Create a client client = network_security_v1alpha1.NetworkSecurityClient() # Initialize request argument(s) - request = network_security_v1alpha1.DeleteClientTlsPolicyRequest( + request = network_security_v1alpha1.DeleteAuthorizationPolicyRequest( name="name_value", ) # Make the request - operation = client.delete_client_tls_policy(request=request) + operation = client.delete_authorization_policy(request=request) print("Waiting for operation to complete...") @@ -1317,13 +1616,13 @@ def sample_delete_client_tls_policy(): print(response) Args: - request (Union[google.cloud.network_security_v1alpha1.types.DeleteClientTlsPolicyRequest, dict]): + request (Union[google.cloud.network_security_v1alpha1.types.DeleteAuthorizationPolicyRequest, dict]): The request object. 
Request used by the - DeleteClientTlsPolicy method. + DeleteAuthorizationPolicy method. name (str): - Required. A name of the ClientTlsPolicy to delete. Must - be in the format - ``projects/*/locations/{location}/clientTlsPolicies/*``. + Required. A name of the AuthorizationPolicy to delete. + Must be in the format + ``projects/{project}/locations/{location}/authorizationPolicies/*``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1367,8 +1666,10 @@ def sample_delete_client_tls_policy(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, client_tls_policy.DeleteClientTlsPolicyRequest): - request = client_tls_policy.DeleteClientTlsPolicyRequest(request) + if not isinstance( + request, authorization_policy.DeleteAuthorizationPolicyRequest + ): + request = authorization_policy.DeleteAuthorizationPolicyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -1376,7 +1677,5664 @@ def sample_delete_client_tls_policy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_client_tls_policy] + rpc = self._transport._wrapped_methods[ + self._transport.delete_authorization_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_backend_authentication_configs( + self, + request: Optional[ + Union[ + backend_authentication_config.ListBackendAuthenticationConfigsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackendAuthenticationConfigsPager: + r"""Lists BackendAuthenticationConfigs in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_backend_authentication_configs(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListBackendAuthenticationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backend_authentication_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsRequest, dict]): + The request object. Request used by the + ListBackendAuthenticationConfigs method. + parent (str): + Required. The project and location from which the + BackendAuthenticationConfigs should be listed, specified + in the format ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListBackendAuthenticationConfigsPager: + Response returned by the + ListBackendAuthenticationConfigs method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + backend_authentication_config.ListBackendAuthenticationConfigsRequest, + ): + request = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_backend_authentication_configs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackendAuthenticationConfigsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backend_authentication_config( + self, + request: Optional[ + Union[ + backend_authentication_config.GetBackendAuthenticationConfigRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backend_authentication_config.BackendAuthenticationConfig: + r"""Gets details of a single BackendAuthenticationConfig + to BackendAuthenticationConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetBackendAuthenticationConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_backend_authentication_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetBackendAuthenticationConfigRequest, dict]): + The request object. Request used by the + GetBackendAuthenticationConfig method. + name (str): + Required. A name of the BackendAuthenticationConfig to + get. Must be in the format + ``projects/*/locations/{location}/backendAuthenticationConfigs/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig: + BackendAuthenticationConfig message groups the TrustConfig together with + other settings that control how the load balancer + authenticates, and expresses its identity to, the + backend: + + - trustConfig is the attached TrustConfig. + + \* wellKnownRoots indicates whether the load balance + should trust backend server certificates that are + issued by public certificate authorities, in addition + to certificates trusted by the TrustConfig. + + \* clientCertificate is a client certificate that the + load balancer uses to express its identity to the + backend, if the connection to the backend uses mTLS. 
+ + You can attach the BackendAuthenticationConfig to the + load balancer's BackendService directly determining + how that BackendService negotiates TLS. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backend_authentication_config.GetBackendAuthenticationConfigRequest + ): + request = ( + backend_authentication_config.GetBackendAuthenticationConfigRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_backend_authentication_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_backend_authentication_config( + self, + request: Optional[ + Union[ + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + backend_authentication_config: Optional[ + gcn_backend_authentication_config.BackendAuthenticationConfig + ] = None, + backend_authentication_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new BackendAuthenticationConfig in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + backend_authentication_config = network_security_v1alpha1.BackendAuthenticationConfig() + backend_authentication_config.name = "name_value" + + request = network_security_v1alpha1.CreateBackendAuthenticationConfigRequest( + parent="parent_value", + backend_authentication_config_id="backend_authentication_config_id_value", + backend_authentication_config=backend_authentication_config, + ) + + # Make the request + operation = client.create_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateBackendAuthenticationConfigRequest, dict]): + The request object. Request used by the + CreateBackendAuthenticationConfig + method. + parent (str): + Required. The parent resource of the + BackendAuthenticationConfig. Must be in the format + ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_authentication_config (google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig): + Required. BackendAuthenticationConfig + resource to be created. + + This corresponds to the ``backend_authentication_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_authentication_config_id (str): + Required. Short name of the + BackendAuthenticationConfig resource to + be created. This value should be 1-63 + characters long, containing only + letters, numbers, hyphens, and + underscores, and should not start with a + number. E.g. "backend-auth-config". + + This corresponds to the ``backend_authentication_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig` BackendAuthenticationConfig message groups the TrustConfig together with + other settings that control how the load balancer + authenticates, and expresses its identity to, the + backend: + + - trustConfig is the attached TrustConfig. + + \* wellKnownRoots indicates whether the load balance + should trust backend server certificates that are + issued by public certificate authorities, in addition + to certificates trusted by the TrustConfig. 
+ + \* clientCertificate is a client certificate that the + load balancer uses to express its identity to the + backend, if the connection to the backend uses mTLS. + + You can attach the BackendAuthenticationConfig to the + load balancer's BackendService directly determining + how that BackendService negotiates TLS. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + parent, + backend_authentication_config, + backend_authentication_config_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, + ): + request = gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backend_authentication_config is not None: + request.backend_authentication_config = backend_authentication_config + if backend_authentication_config_id is not None: + request.backend_authentication_config_id = ( + backend_authentication_config_id + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_backend_authentication_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_backend_authentication_config.BackendAuthenticationConfig, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_backend_authentication_config( + self, + request: Optional[ + Union[ + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, + dict, + ] + ] = None, + *, + backend_authentication_config: Optional[ + gcn_backend_authentication_config.BackendAuthenticationConfig + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single + BackendAuthenticationConfig to + BackendAuthenticationConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + backend_authentication_config = network_security_v1alpha1.BackendAuthenticationConfig() + backend_authentication_config.name = "name_value" + + request = network_security_v1alpha1.UpdateBackendAuthenticationConfigRequest( + backend_authentication_config=backend_authentication_config, + ) + + # Make the request + operation = client.update_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateBackendAuthenticationConfigRequest, dict]): + The request object. Request used by + UpdateBackendAuthenticationConfig + method. + backend_authentication_config (google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig): + Required. Updated + BackendAuthenticationConfig resource. + + This corresponds to the ``backend_authentication_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the BackendAuthenticationConfig resource + by the update. The fields specified in the update_mask + are relative to the resource, not the full request. A + field will be overwritten if it is in the mask. If the + user does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig` BackendAuthenticationConfig message groups the TrustConfig together with + other settings that control how the load balancer + authenticates, and expresses its identity to, the + backend: + + - trustConfig is the attached TrustConfig. + + \* wellKnownRoots indicates whether the load balance + should trust backend server certificates that are + issued by public certificate authorities, in addition + to certificates trusted by the TrustConfig. + + \* clientCertificate is a client certificate that the + load balancer uses to express its identity to the + backend, if the connection to the backend uses mTLS. + + You can attach the BackendAuthenticationConfig to the + load balancer's BackendService directly determining + how that BackendService negotiates TLS. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backend_authentication_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, + ): + request = gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backend_authentication_config is not None: + request.backend_authentication_config = backend_authentication_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_backend_authentication_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "backend_authentication_config.name", + request.backend_authentication_config.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_backend_authentication_config.BackendAuthenticationConfig, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_backend_authentication_config( + self, + request: Optional[ + Union[ + backend_authentication_config.DeleteBackendAuthenticationConfigRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single BackendAuthenticationConfig to + BackendAuthenticationConfig. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteBackendAuthenticationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteBackendAuthenticationConfigRequest, dict]): + The request object. Request used by the + DeleteBackendAuthenticationConfig + method. + name (str): + Required. A name of the BackendAuthenticationConfig to + delete. Must be in the format + ``projects/*/locations/{location}/backendAuthenticationConfigs/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + backend_authentication_config.DeleteBackendAuthenticationConfigRequest, + ): + request = ( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_backend_authentication_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_server_tls_policies( + self, + request: Optional[ + Union[server_tls_policy.ListServerTlsPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListServerTlsPoliciesPager: + r"""Lists ServerTlsPolicies in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_server_tls_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListServerTlsPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_server_tls_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesRequest, dict]): + The request object. Request used by the + ListServerTlsPolicies method. + parent (str): + Required. The project and location from which the + ServerTlsPolicies should be listed, specified in the + format ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListServerTlsPoliciesPager: + Response returned by the + ListServerTlsPolicies method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, server_tls_policy.ListServerTlsPoliciesRequest): + request = server_tls_policy.ListServerTlsPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_server_tls_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListServerTlsPoliciesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_server_tls_policy( + self, + request: Optional[ + Union[server_tls_policy.GetServerTlsPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> server_tls_policy.ServerTlsPolicy: + r"""Gets details of a single ServerTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetServerTlsPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_server_tls_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetServerTlsPolicyRequest, dict]): + The request object. Request used by the + GetServerTlsPolicy method. + name (str): + Required. A name of the ServerTlsPolicy to get. Must be + in the format + ``projects/*/locations/{location}/serverTlsPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.network_security_v1alpha1.types.ServerTlsPolicy: + ServerTlsPolicy is a resource that specifies how a server should authenticate + incoming requests. This resource itself does not + affect configuration unless it is attached to a + target HTTPS proxy or endpoint config selector + resource. + + ServerTlsPolicy in the form accepted by Application + Load Balancers can be attached only to + TargetHttpsProxy with an EXTERNAL, EXTERNAL_MANAGED + or INTERNAL_MANAGED load balancing scheme. Traffic + Director compatible ServerTlsPolicies can be attached + to EndpointPolicy and TargetHttpsProxy with Traffic + Director INTERNAL_SELF_MANAGED load balancing scheme. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, server_tls_policy.GetServerTlsPolicyRequest): + request = server_tls_policy.GetServerTlsPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_server_tls_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_server_tls_policy( + self, + request: Optional[ + Union[gcn_server_tls_policy.CreateServerTlsPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + server_tls_policy: Optional[gcn_server_tls_policy.ServerTlsPolicy] = None, + server_tls_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new ServerTlsPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + server_tls_policy = network_security_v1alpha1.ServerTlsPolicy() + server_tls_policy.name = "name_value" + + request = network_security_v1alpha1.CreateServerTlsPolicyRequest( + parent="parent_value", + server_tls_policy_id="server_tls_policy_id_value", + server_tls_policy=server_tls_policy, + ) + + # Make the request + operation = client.create_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateServerTlsPolicyRequest, dict]): + The request object. Request used by the + CreateServerTlsPolicy method. + parent (str): + Required. The parent resource of the ServerTlsPolicy. + Must be in the format + ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + server_tls_policy (google.cloud.network_security_v1alpha1.types.ServerTlsPolicy): + Required. ServerTlsPolicy resource to + be created. + + This corresponds to the ``server_tls_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + server_tls_policy_id (str): + Required. Short name of the ServerTlsPolicy resource to + be created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and + underscores, and should not start with a number. E.g. + "server_mtls_policy". + + This corresponds to the ``server_tls_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ServerTlsPolicy` ServerTlsPolicy is a resource that specifies how a server should authenticate + incoming requests. This resource itself does not + affect configuration unless it is attached to a + target HTTPS proxy or endpoint config selector + resource. + + ServerTlsPolicy in the form accepted by Application + Load Balancers can be attached only to + TargetHttpsProxy with an EXTERNAL, EXTERNAL_MANAGED + or INTERNAL_MANAGED load balancing scheme. Traffic + Director compatible ServerTlsPolicies can be attached + to EndpointPolicy and TargetHttpsProxy with Traffic + Director INTERNAL_SELF_MANAGED load balancing scheme. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent, server_tls_policy, server_tls_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_server_tls_policy.CreateServerTlsPolicyRequest): + request = gcn_server_tls_policy.CreateServerTlsPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if server_tls_policy is not None: + request.server_tls_policy = server_tls_policy + if server_tls_policy_id is not None: + request.server_tls_policy_id = server_tls_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_server_tls_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_server_tls_policy.ServerTlsPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_server_tls_policy( + self, + request: Optional[ + Union[gcn_server_tls_policy.UpdateServerTlsPolicyRequest, dict] + ] = None, + *, + server_tls_policy: Optional[gcn_server_tls_policy.ServerTlsPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single ServerTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + server_tls_policy = network_security_v1alpha1.ServerTlsPolicy() + server_tls_policy.name = "name_value" + + request = network_security_v1alpha1.UpdateServerTlsPolicyRequest( + server_tls_policy=server_tls_policy, + ) + + # Make the request + operation = client.update_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateServerTlsPolicyRequest, dict]): + The request object. 
Request used by UpdateServerTlsPolicy + method. + server_tls_policy (google.cloud.network_security_v1alpha1.types.ServerTlsPolicy): + Required. Updated ServerTlsPolicy + resource. + + This corresponds to the ``server_tls_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the ServerTlsPolicy resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ServerTlsPolicy` ServerTlsPolicy is a resource that specifies how a server should authenticate + incoming requests. This resource itself does not + affect configuration unless it is attached to a + target HTTPS proxy or endpoint config selector + resource. + + ServerTlsPolicy in the form accepted by Application + Load Balancers can be attached only to + TargetHttpsProxy with an EXTERNAL, EXTERNAL_MANAGED + or INTERNAL_MANAGED load balancing scheme. Traffic + Director compatible ServerTlsPolicies can be attached + to EndpointPolicy and TargetHttpsProxy with Traffic + Director INTERNAL_SELF_MANAGED load balancing scheme. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [server_tls_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_server_tls_policy.UpdateServerTlsPolicyRequest): + request = gcn_server_tls_policy.UpdateServerTlsPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if server_tls_policy is not None: + request.server_tls_policy = server_tls_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_server_tls_policy] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("server_tls_policy.name", request.server_tls_policy.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_server_tls_policy.ServerTlsPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_server_tls_policy( + self, + request: Optional[ + Union[server_tls_policy.DeleteServerTlsPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single ServerTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteServerTlsPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteServerTlsPolicyRequest, dict]): + The request object. Request used by the + DeleteServerTlsPolicy method. + name (str): + Required. A name of the ServerTlsPolicy to delete. Must + be in the format + ``projects/*/locations/{location}/serverTlsPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, server_tls_policy.DeleteServerTlsPolicyRequest): + request = server_tls_policy.DeleteServerTlsPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_server_tls_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_client_tls_policies( + self, + request: Optional[ + Union[client_tls_policy.ListClientTlsPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListClientTlsPoliciesPager: + r"""Lists ClientTlsPolicies in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_client_tls_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListClientTlsPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_client_tls_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest, dict]): + The request object. Request used by the + ListClientTlsPolicies method. + parent (str): + Required. The project and location from which the + ClientTlsPolicies should be listed, specified in the + format ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListClientTlsPoliciesPager: + Response returned by the + ListClientTlsPolicies method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, client_tls_policy.ListClientTlsPoliciesRequest): + request = client_tls_policy.ListClientTlsPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_client_tls_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListClientTlsPoliciesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_client_tls_policy( + self, + request: Optional[ + Union[client_tls_policy.GetClientTlsPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> client_tls_policy.ClientTlsPolicy: + r"""Gets details of a single ClientTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_client_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetClientTlsPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_client_tls_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetClientTlsPolicyRequest, dict]): + The request object. Request used by the + GetClientTlsPolicy method. + name (str): + Required. A name of the ClientTlsPolicy to get. Must be + in the format + ``projects/*/locations/{location}/clientTlsPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.ClientTlsPolicy: + ClientTlsPolicy is a resource that + specifies how a client should + authenticate connections to backends of + a service. This resource itself does not + affect configuration unless it is + attached to a backend service resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, client_tls_policy.GetClientTlsPolicyRequest): + request = client_tls_policy.GetClientTlsPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_client_tls_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_client_tls_policy( + self, + request: Optional[ + Union[gcn_client_tls_policy.CreateClientTlsPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + client_tls_policy: Optional[gcn_client_tls_policy.ClientTlsPolicy] = None, + client_tls_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new ClientTlsPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_client_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + client_tls_policy = network_security_v1alpha1.ClientTlsPolicy() + client_tls_policy.name = "name_value" + + request = network_security_v1alpha1.CreateClientTlsPolicyRequest( + parent="parent_value", + client_tls_policy_id="client_tls_policy_id_value", + client_tls_policy=client_tls_policy, + ) + + # Make the request + operation = client.create_client_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateClientTlsPolicyRequest, dict]): + The request object. Request used by the + CreateClientTlsPolicy method. + parent (str): + Required. The parent resource of the ClientTlsPolicy. + Must be in the format + ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + client_tls_policy (google.cloud.network_security_v1alpha1.types.ClientTlsPolicy): + Required. ClientTlsPolicy resource to + be created. + + This corresponds to the ``client_tls_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + client_tls_policy_id (str): + Required. Short name of the ClientTlsPolicy resource to + be created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and + underscores, and should not start with a number. E.g. + "client_mtls_policy". + + This corresponds to the ``client_tls_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy` ClientTlsPolicy is a resource that specifies how a client should authenticate + connections to backends of a service. This resource + itself does not affect configuration unless it is + attached to a backend service resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, client_tls_policy, client_tls_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_client_tls_policy.CreateClientTlsPolicyRequest): + request = gcn_client_tls_policy.CreateClientTlsPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if client_tls_policy is not None: + request.client_tls_policy = client_tls_policy + if client_tls_policy_id is not None: + request.client_tls_policy_id = client_tls_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_client_tls_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_client_tls_policy.ClientTlsPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_client_tls_policy( + self, + request: Optional[ + Union[gcn_client_tls_policy.UpdateClientTlsPolicyRequest, dict] + ] = None, + *, + client_tls_policy: Optional[gcn_client_tls_policy.ClientTlsPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single ClientTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
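``create_client_tls_policy`` (like the other mutating RPCs in this client) returns a ``google.api_core.operation.Operation`` future, assembled by ``operation.from_gapic`` above. A minimal sketch of waiting on it with a deadline and handling failure; the timeout and resource names are arbitrary placeholders:

.. code-block:: python

    from google.api_core import exceptions
    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.NetworkSecurityClient()

    policy = network_security_v1alpha1.ClientTlsPolicy(name="name_value")
    operation = client.create_client_tls_policy(
        parent="projects/my-project/locations/us-central1",
        client_tls_policy=policy,
        client_tls_policy_id="client_mtls_policy",
    )

    try:
        result = operation.result(timeout=300)  # blocks until the LRO finishes
        print("Created:", result.name)
    except exceptions.GoogleAPICallError as err:
        print("Creation failed:", err)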
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_client_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + client_tls_policy = network_security_v1alpha1.ClientTlsPolicy() + client_tls_policy.name = "name_value" + + request = network_security_v1alpha1.UpdateClientTlsPolicyRequest( + client_tls_policy=client_tls_policy, + ) + + # Make the request + operation = client.update_client_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateClientTlsPolicyRequest, dict]): + The request object. Request used by UpdateClientTlsPolicy + method. + client_tls_policy (google.cloud.network_security_v1alpha1.types.ClientTlsPolicy): + Required. Updated ClientTlsPolicy + resource. + + This corresponds to the ``client_tls_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the ClientTlsPolicy resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.ClientTlsPolicy` ClientTlsPolicy is a resource that specifies how a client should authenticate + connections to backends of a service. This resource + itself does not affect configuration unless it is + attached to a backend service resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [client_tls_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
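The ``update_mask`` argument documented above enables partial updates: only the masked fields are overwritten. A sketch of a masked update, assuming ``description`` exists as a writable field on ``ClientTlsPolicy`` (the field path is illustrative):

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.NetworkSecurityClient()

    policy = network_security_v1alpha1.ClientTlsPolicy(
        name="projects/my-project/locations/us-central1/clientTlsPolicies/my-policy",
        description="updated description",  # illustrative field path
    )
    mask = field_mask_pb2.FieldMask(paths=["description"])

    operation = client.update_client_tls_policy(
        client_tls_policy=policy,
        update_mask=mask,
    )
    print(operation.result())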
+ if not isinstance(request, gcn_client_tls_policy.UpdateClientTlsPolicyRequest): + request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if client_tls_policy is not None: + request.client_tls_policy = client_tls_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_client_tls_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("client_tls_policy.name", request.client_tls_policy.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_client_tls_policy.ClientTlsPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_client_tls_policy( + self, + request: Optional[ + Union[client_tls_policy.DeleteClientTlsPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single ClientTlsPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_client_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteClientTlsPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_client_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteClientTlsPolicyRequest, dict]): + The request object. Request used by the + DeleteClientTlsPolicy method. + name (str): + Required. A name of the ClientTlsPolicy to delete. Must + be in the format + ``projects/*/locations/{location}/clientTlsPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, client_tls_policy.DeleteClientTlsPolicyRequest): + request = client_tls_policy.DeleteClientTlsPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_client_tls_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_gateway_security_policies( + self, + request: Optional[ + Union[gateway_security_policy.ListGatewaySecurityPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGatewaySecurityPoliciesPager: + r"""Lists GatewaySecurityPolicies in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
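``delete_client_tls_policy`` also returns an ``Operation``, but one whose result type is ``google.protobuf.empty_pb2.Empty``, so ``result()`` is mainly useful for blocking until the deletion lands. A sketch that waits and then confirms the resource is gone (names are placeholders):

.. code-block:: python

    from google.api_core import exceptions
    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.NetworkSecurityClient()
    name = "projects/my-project/locations/us-central1/clientTlsPolicies/my-policy"

    client.delete_client_tls_policy(name=name).result()  # resolves to Empty

    try:
        client.get_client_tls_policy(name=name)
    except exceptions.NotFound:
        print("Policy deleted.")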
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_gateway_security_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListGatewaySecurityPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gateway_security_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesRequest, dict]): + The request object. Request used with the + ListGatewaySecurityPolicies method. + parent (str): + Required. The project and location from which the + GatewaySecurityPolicies should be listed, specified in + the format ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListGatewaySecurityPoliciesPager: + Response returned by the + ListGatewaySecurityPolicies method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy.ListGatewaySecurityPoliciesRequest + ): + request = gateway_security_policy.ListGatewaySecurityPoliciesRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_gateway_security_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListGatewaySecurityPoliciesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_gateway_security_policy( + self, + request: Optional[ + Union[gateway_security_policy.GetGatewaySecurityPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gateway_security_policy.GatewaySecurityPolicy: + r"""Gets details of a single GatewaySecurityPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetGatewaySecurityPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_gateway_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetGatewaySecurityPolicyRequest, dict]): + The request object. Request used by the + GetGatewaySecurityPolicy method. + name (str): + Required. A name of the GatewaySecurityPolicy to get. + Must be in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy: + The GatewaySecurityPolicy resource + contains a collection of + GatewaySecurityPolicyRules and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy.GetGatewaySecurityPolicyRequest + ): + request = gateway_security_policy.GetGatewaySecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
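The list methods return pager objects such as the ``ListGatewaySecurityPoliciesPager`` constructed above; iterating a pager yields individual resources and fetches further pages lazily, while ``pages`` exposes whole responses. A short sketch (the parent is a placeholder, and the ``gateway_security_policies`` response field name is an assumption about the List response message):

.. code-block:: python

    import itertools

    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.NetworkSecurityClient()
    parent = "projects/my-project/locations/us-central1"

    # Item-by-item iteration; additional pages are fetched on demand.
    pager = client.list_gateway_security_policies(parent=parent)
    for policy in itertools.islice(pager, 10):
        print(policy.name)

    # Page-by-page iteration over full responses.
    for page in client.list_gateway_security_policies(parent=parent).pages:
        print(len(page.gateway_security_policies))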
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_gateway_security_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_gateway_security_policy( + self, + request: Optional[ + Union[gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + gateway_security_policy: Optional[ + gcn_gateway_security_policy.GatewaySecurityPolicy + ] = None, + gateway_security_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new GatewaySecurityPolicy in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + gateway_security_policy = network_security_v1alpha1.GatewaySecurityPolicy() + gateway_security_policy.name = "name_value" + + request = network_security_v1alpha1.CreateGatewaySecurityPolicyRequest( + parent="parent_value", + gateway_security_policy_id="gateway_security_policy_id_value", + gateway_security_policy=gateway_security_policy, + ) + + # Make the request + operation = client.create_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateGatewaySecurityPolicyRequest, dict]): + The request object. Request used by the + CreateGatewaySecurityPolicy method. + parent (str): + Required. The parent resource of the + GatewaySecurityPolicy. Must be in the format + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + gateway_security_policy (google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy): + Required. GatewaySecurityPolicy + resource to be created. + + This corresponds to the ``gateway_security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + gateway_security_policy_id (str): + Required. Short name of the GatewaySecurityPolicy + resource to be created. This value should be 1-63 + characters long, containing only letters, numbers, + hyphens, and underscores, and should not start with a + number. E.g. 
"gateway_security_policy1". + + This corresponds to the ``gateway_security_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy` The GatewaySecurityPolicy resource contains a collection of + GatewaySecurityPolicyRules and associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, gateway_security_policy, gateway_security_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest + ): + request = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if gateway_security_policy is not None: + request.gateway_security_policy = gateway_security_policy + if gateway_security_policy_id is not None: + request.gateway_security_policy_id = gateway_security_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_gateway_security_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_gateway_security_policy.GatewaySecurityPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_gateway_security_policy( + self, + request: Optional[ + Union[gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest, dict] + ] = None, + *, + gateway_security_policy: Optional[ + gcn_gateway_security_policy.GatewaySecurityPolicy + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single + GatewaySecurityPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + gateway_security_policy = network_security_v1alpha1.GatewaySecurityPolicy() + gateway_security_policy.name = "name_value" + + request = network_security_v1alpha1.UpdateGatewaySecurityPolicyRequest( + gateway_security_policy=gateway_security_policy, + ) + + # Make the request + operation = client.update_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateGatewaySecurityPolicyRequest, dict]): + The request object. Request used by the + UpdateGatewaySecurityPolicy method. + gateway_security_policy (google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy): + Required. Updated + GatewaySecurityPolicy resource. + + This corresponds to the ``gateway_security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the GatewaySecurityPolicy resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy` The GatewaySecurityPolicy resource contains a collection of + GatewaySecurityPolicyRules and associated metadata. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [gateway_security_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest + ): + request = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if gateway_security_policy is not None: + request.gateway_security_policy = gateway_security_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_gateway_security_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "gateway_security_policy.name", + request.gateway_security_policy.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_gateway_security_policy.GatewaySecurityPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_gateway_security_policy( + self, + request: Optional[ + Union[gateway_security_policy.DeleteGatewaySecurityPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single GatewaySecurityPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteGatewaySecurityPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteGatewaySecurityPolicyRequest, dict]): + The request object. Request used by the + DeleteGatewaySecurityPolicy method. + name (str): + Required. A name of the GatewaySecurityPolicy to delete. + Must be in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy.DeleteGatewaySecurityPolicyRequest + ): + request = gateway_security_policy.DeleteGatewaySecurityPolicyRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_gateway_security_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
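Beyond ``result()``, the returned ``Operation`` exposes the underlying long-running operation for inspection, which is useful for logging or for resuming polling elsewhere. A sketch around ``delete_gateway_security_policy`` (the ``target``/``verb`` metadata fields are assumptions about ``OperationMetadata`` in this alpha surface):

.. code-block:: python

    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.NetworkSecurityClient()

    operation = client.delete_gateway_security_policy(
        name="projects/my-project/locations/us-central1/gatewaySecurityPolicies/my-gsp",
    )

    # Fully-qualified LRO name; can be stored to resume polling later.
    print(operation.operation.name)

    # Decoded progress metadata, if the server has populated it yet.
    if operation.metadata is not None:
        print(operation.metadata.target, operation.metadata.verb)

    operation.result()  # block until the deletion completes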
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_gateway_security_policy_rules( + self, + request: Optional[ + Union[ + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, dict + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGatewaySecurityPolicyRulesPager: + r"""Lists GatewaySecurityPolicyRules in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_gateway_security_policy_rules(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListGatewaySecurityPolicyRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gateway_security_policy_rules(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesRequest, dict]): + The request object. Request used with the + ListGatewaySecurityPolicyRules method. + parent (str): + Required. The project, location and + GatewaySecurityPolicy from which the + GatewaySecurityPolicyRules should be listed, specified + in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/{gatewaySecurityPolicy}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListGatewaySecurityPolicyRulesPager: + Response returned by the + ListGatewaySecurityPolicyRules method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest + ): + request = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_gateway_security_policy_rules + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGatewaySecurityPolicyRulesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_gateway_security_policy_rule( + self, + request: Optional[ + Union[ + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest, dict + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gateway_security_policy_rule.GatewaySecurityPolicyRule: + r"""Gets details of a single GatewaySecurityPolicyRule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + # Make the request + response = client.get_gateway_security_policy_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetGatewaySecurityPolicyRuleRequest, dict]): + The request object. Request used by the + GetGatewaySecurityPolicyRule method. + name (str): + Required. The name of the GatewaySecurityPolicyRule to + retrieve. 
Format:
+ projects/{project}/location/{location}/gatewaySecurityPolicies/*/rules/*
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule:
+ The GatewaySecurityPolicyRule
+ resource is in a nested collection
+ within a GatewaySecurityPolicy and
+ represents a traffic matching condition
+ and associated action to perform.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(
+ request, gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest
+ ):
+ request = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest(
+ request
+ )
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[
+ self._transport.get_gateway_security_policy_rule
+ ]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def create_gateway_security_policy_rule(
+ self,
+ request: Optional[
+ Union[
+ gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest,
+ dict,
+ ]
+ ] = None,
+ *,
+ parent: Optional[str] = None,
+ gateway_security_policy_rule: Optional[
+ gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule
+ ] = None,
+ gateway_security_policy_rule_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Creates a new GatewaySecurityPolicyRule in a given
+ project and location.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + gateway_security_policy_rule = network_security_v1alpha1.GatewaySecurityPolicyRule() + gateway_security_policy_rule.basic_profile = "DENY" + gateway_security_policy_rule.name = "name_value" + gateway_security_policy_rule.enabled = True + gateway_security_policy_rule.priority = 898 + gateway_security_policy_rule.session_matcher = "session_matcher_value" + + request = network_security_v1alpha1.CreateGatewaySecurityPolicyRuleRequest( + parent="parent_value", + gateway_security_policy_rule=gateway_security_policy_rule, + ) + + # Make the request + operation = client.create_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateGatewaySecurityPolicyRuleRequest, dict]): + The request object. Methods for GatewaySecurityPolicy + RULES/GatewaySecurityPolicyRules. + Request used by the + CreateGatewaySecurityPolicyRule method. + parent (str): + Required. The parent where this rule will be created. + Format : + projects/{project}/location/{location}/gatewaySecurityPolicies/\* + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + gateway_security_policy_rule (google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule): + Required. The rule to be created. + This corresponds to the ``gateway_security_policy_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + gateway_security_policy_rule_id (str): + The ID to use for the rule, which will become the final + component of the rule's resource name. This value should + be 4-63 characters, and valid characters are + /[a-z][0-9]-/. + + This corresponds to the ``gateway_security_policy_rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule` The GatewaySecurityPolicyRule resource is in a nested collection within a + GatewaySecurityPolicy and represents a traffic + matching condition and associated action to perform. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [ + parent, + gateway_security_policy_rule, + gateway_security_policy_rule_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, + ): + request = ( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if gateway_security_policy_rule is not None: + request.gateway_security_policy_rule = gateway_security_policy_rule + if gateway_security_policy_rule_id is not None: + request.gateway_security_policy_rule_id = ( + gateway_security_policy_rule_id + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_gateway_security_policy_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_gateway_security_policy_rule( + self, + request: Optional[ + Union[ + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, + dict, + ] + ] = None, + *, + gateway_security_policy_rule: Optional[ + gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single + GatewaySecurityPolicyRule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
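Putting the pieces together for the nested rule collection: a sketch that creates a ``GatewaySecurityPolicyRule`` under an existing policy, reusing the field values from the generated sample above (the parent name and rule ID are placeholders, and ``session_matcher_value`` is not a valid matcher expression):

.. code-block:: python

    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.NetworkSecurityClient()
    parent = "projects/my-project/locations/us-central1/gatewaySecurityPolicies/my-gsp"

    rule = network_security_v1alpha1.GatewaySecurityPolicyRule(
        name="name_value",
        enabled=True,
        priority=898,
        basic_profile="DENY",
        session_matcher="session_matcher_value",  # placeholder, not a real matcher
    )

    operation = client.create_gateway_security_policy_rule(
        parent=parent,
        gateway_security_policy_rule=rule,
        gateway_security_policy_rule_id="deny-example",  # 4-63 chars, [a-z][0-9]-
    )
    print(operation.result())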
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + gateway_security_policy_rule = network_security_v1alpha1.GatewaySecurityPolicyRule() + gateway_security_policy_rule.basic_profile = "DENY" + gateway_security_policy_rule.name = "name_value" + gateway_security_policy_rule.enabled = True + gateway_security_policy_rule.priority = 898 + gateway_security_policy_rule.session_matcher = "session_matcher_value" + + request = network_security_v1alpha1.UpdateGatewaySecurityPolicyRuleRequest( + gateway_security_policy_rule=gateway_security_policy_rule, + ) + + # Make the request + operation = client.update_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateGatewaySecurityPolicyRuleRequest, dict]): + The request object. Request used by the + UpdateGatewaySecurityPolicyRule method. + gateway_security_policy_rule (google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule): + Required. Updated + GatewaySecurityPolicyRule resource. + + This corresponds to the ``gateway_security_policy_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the GatewaySecurityPolicy resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule` The GatewaySecurityPolicyRule resource is in a nested collection within a + GatewaySecurityPolicy and represents a traffic + matching condition and associated action to perform. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [gateway_security_policy_rule, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, + ): + request = ( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if gateway_security_policy_rule is not None: + request.gateway_security_policy_rule = gateway_security_policy_rule + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_gateway_security_policy_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "gateway_security_policy_rule.name", + request.gateway_security_policy_rule.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_gateway_security_policy_rule( + self, + request: Optional[ + Union[ + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single GatewaySecurityPolicyRule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteGatewaySecurityPolicyRuleRequest, dict]): + The request object. Request used by the + DeleteGatewaySecurityPolicyRule method. + name (str): + Required. A name of the GatewaySecurityPolicyRule to + delete. Must be in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/{gatewaySecurityPolicy}/rules/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest + ): + request = ( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_gateway_security_policy_rule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_url_lists( + self, + request: Optional[Union[url_list.ListUrlListsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListUrlListsPager: + r"""Lists UrlLists in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_url_lists(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListUrlListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_url_lists(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListUrlListsRequest, dict]): + The request object. Request used by the ListUrlList + method. + parent (str): + Required. The project and location from which the + UrlLists should be listed, specified in the format + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListUrlListsPager: + Response returned by the ListUrlLists + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, url_list.ListUrlListsRequest): + request = url_list.ListUrlListsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_url_lists] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListUrlListsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_url_list( + self, + request: Optional[Union[url_list.GetUrlListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> url_list.UrlList: + r"""Gets details of a single UrlList. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetUrlListRequest( + name="name_value", + ) + + # Make the request + response = client.get_url_list(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetUrlListRequest, dict]): + The request object. Request used by the GetUrlList + method. + name (str): + Required. A name of the UrlList to get. Must be in the + format ``projects/*/locations/{location}/urlLists/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.UrlList: + UrlList proto helps users to set + reusable, independently manageable lists + of hosts, host patterns, URLs, URL + patterns. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, url_list.GetUrlListRequest): + request = url_list.GetUrlListRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_url_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_url_list( + self, + request: Optional[Union[gcn_url_list.CreateUrlListRequest, dict]] = None, + *, + parent: Optional[str] = None, + url_list: Optional[gcn_url_list.UrlList] = None, + url_list_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new UrlList in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + url_list = network_security_v1alpha1.UrlList() + url_list.name = "name_value" + url_list.values = ['values_value1', 'values_value2'] + + request = network_security_v1alpha1.CreateUrlListRequest( + parent="parent_value", + url_list_id="url_list_id_value", + url_list=url_list, + ) + + # Make the request + operation = client.create_url_list(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateUrlListRequest, dict]): + The request object. Request used by the CreateUrlList + method. + parent (str): + Required. The parent resource of the UrlList. Must be in + the format ``projects/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_list (google.cloud.network_security_v1alpha1.types.UrlList): + Required. UrlList resource to be + created. + + This corresponds to the ``url_list`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_list_id (str): + Required. Short name of the UrlList resource to be + created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and + underscores, and should not start with a number. E.g. + "url_list". + + This corresponds to the ``url_list_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.UrlList` UrlList proto helps users to set reusable, independently manageable lists + of hosts, host patterns, URLs, URL patterns. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, url_list, url_list_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_url_list.CreateUrlListRequest): + request = gcn_url_list.CreateUrlListRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if url_list is not None: + request.url_list = url_list + if url_list_id is not None: + request.url_list_id = url_list_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_url_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_url_list.UrlList, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_url_list( + self, + request: Optional[Union[gcn_url_list.UpdateUrlListRequest, dict]] = None, + *, + url_list: Optional[gcn_url_list.UrlList] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single UrlList. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + url_list = network_security_v1alpha1.UrlList() + url_list.name = "name_value" + url_list.values = ['values_value1', 'values_value2'] + + request = network_security_v1alpha1.UpdateUrlListRequest( + url_list=url_list, + ) + + # Make the request + operation = client.update_url_list(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateUrlListRequest, dict]): + The request object. Request used by UpdateUrlList method. + url_list (google.cloud.network_security_v1alpha1.types.UrlList): + Required. Updated UrlList resource. + This corresponds to the ``url_list`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the UrlList resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.UrlList` UrlList proto helps users to set reusable, independently manageable lists + of hosts, host patterns, URLs, URL patterns. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [url_list, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_url_list.UpdateUrlListRequest): + request = gcn_url_list.UpdateUrlListRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if url_list is not None: + request.url_list = url_list + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_url_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("url_list.name", request.url_list.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_url_list.UrlList, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_url_list( + self, + request: Optional[Union[url_list.DeleteUrlListRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single UrlList. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteUrlListRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_url_list(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteUrlListRequest, dict]): + The request object. Request used by the DeleteUrlList + method. + name (str): + Required. A name of the UrlList to delete. Must be in + the format + ``projects/*/locations/{location}/urlLists/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, url_list.DeleteUrlListRequest): + request = url_list.DeleteUrlListRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_url_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_tls_inspection_policies( + self, + request: Optional[ + Union[tls_inspection_policy.ListTlsInspectionPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTlsInspectionPoliciesPager: + r"""Lists TlsInspectionPolicies in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_tls_inspection_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListTlsInspectionPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tls_inspection_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesRequest, dict]): + The request object. Request used with the + ListTlsInspectionPolicies method. + parent (str): + Required. The project and location from which the + TlsInspectionPolicies should be listed, specified in the + format ``projects/{project}/locations/{location}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListTlsInspectionPoliciesPager: + Response returned by the + ListTlsInspectionPolicies method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, tls_inspection_policy.ListTlsInspectionPoliciesRequest + ): + request = tls_inspection_policy.ListTlsInspectionPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_tls_inspection_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTlsInspectionPoliciesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_tls_inspection_policy( + self, + request: Optional[ + Union[tls_inspection_policy.GetTlsInspectionPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tls_inspection_policy.TlsInspectionPolicy: + r"""Gets details of a single TlsInspectionPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetTlsInspectionPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_tls_inspection_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetTlsInspectionPolicyRequest, dict]): + The request object. Request used by the + GetTlsInspectionPolicy method. + name (str): + Required. A name of the TlsInspectionPolicy to get. Must + be in the format + ``projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy: + The TlsInspectionPolicy resource + contains references to CA pools in + Certificate Authority Service and + associated metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, tls_inspection_policy.GetTlsInspectionPolicyRequest): + request = tls_inspection_policy.GetTlsInspectionPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_tls_inspection_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_tls_inspection_policy( + self, + request: Optional[ + Union[gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + tls_inspection_policy: Optional[ + gcn_tls_inspection_policy.TlsInspectionPolicy + ] = None, + tls_inspection_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new TlsInspectionPolicy in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + tls_inspection_policy = network_security_v1alpha1.TlsInspectionPolicy() + tls_inspection_policy.name = "name_value" + tls_inspection_policy.ca_pool = "ca_pool_value" + + request = network_security_v1alpha1.CreateTlsInspectionPolicyRequest( + parent="parent_value", + tls_inspection_policy_id="tls_inspection_policy_id_value", + tls_inspection_policy=tls_inspection_policy, + ) + + # Make the request + operation = client.create_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateTlsInspectionPolicyRequest, dict]): + The request object. Request used by the + CreateTlsInspectionPolicy method. + parent (str): + Required. The parent resource of the + TlsInspectionPolicy. Must be in the format + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tls_inspection_policy (google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy): + Required. TlsInspectionPolicy + resource to be created. + + This corresponds to the ``tls_inspection_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tls_inspection_policy_id (str): + Required. Short name of the TlsInspectionPolicy resource + to be created. This value should be 1-63 characters + long, containing only letters, numbers, hyphens, and + underscores, and should not start with a number. E.g. + "tls_inspection_policy1". + + This corresponds to the ``tls_inspection_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy` The TlsInspectionPolicy resource contains references to CA pools in + Certificate Authority Service and associated + metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, tls_inspection_policy, tls_inspection_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest + ): + request = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tls_inspection_policy is not None: + request.tls_inspection_policy = tls_inspection_policy + if tls_inspection_policy_id is not None: + request.tls_inspection_policy_id = tls_inspection_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_tls_inspection_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_tls_inspection_policy.TlsInspectionPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_tls_inspection_policy( + self, + request: Optional[ + Union[gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest, dict] + ] = None, + *, + tls_inspection_policy: Optional[ + gcn_tls_inspection_policy.TlsInspectionPolicy + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single + TlsInspectionPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + tls_inspection_policy = network_security_v1alpha1.TlsInspectionPolicy() + tls_inspection_policy.name = "name_value" + tls_inspection_policy.ca_pool = "ca_pool_value" + + request = network_security_v1alpha1.UpdateTlsInspectionPolicyRequest( + tls_inspection_policy=tls_inspection_policy, + ) + + # Make the request + operation = client.update_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateTlsInspectionPolicyRequest, dict]): + The request object. Request used by the + UpdateTlsInspectionPolicy method. + tls_inspection_policy (google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy): + Required. Updated TlsInspectionPolicy + resource. + + This corresponds to the ``tls_inspection_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the TlsInspectionPolicy resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the user + does not provide a mask then all fields will be + overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy` The TlsInspectionPolicy resource contains references to CA pools in + Certificate Authority Service and associated + metadata. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [tls_inspection_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest + ): + request = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tls_inspection_policy is not None: + request.tls_inspection_policy = tls_inspection_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_tls_inspection_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("tls_inspection_policy.name", request.tls_inspection_policy.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_tls_inspection_policy.TlsInspectionPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_tls_inspection_policy( + self, + request: Optional[ + Union[tls_inspection_policy.DeleteTlsInspectionPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single TlsInspectionPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteTlsInspectionPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteTlsInspectionPolicyRequest, dict]): + The request object. Request used by the + DeleteTlsInspectionPolicy method. + name (str): + Required. A name of the TlsInspectionPolicy to delete. + Must be in the format + ``projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, tls_inspection_policy.DeleteTlsInspectionPolicyRequest + ): + request = tls_inspection_policy.DeleteTlsInspectionPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_tls_inspection_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_authz_policies( + self, + request: Optional[Union[authz_policy.ListAuthzPoliciesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAuthzPoliciesPager: + r"""Lists AuthzPolicies in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_authz_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListAuthzPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_authz_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesRequest, dict]): + The request object. Message for requesting list of ``AuthzPolicy`` + resources. + parent (str): + Required. The project and location from which the + ``AuthzPolicy`` resources are listed, specified in the + following format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.network_security.pagers.ListAuthzPoliciesPager: + Message for response to listing AuthzPolicy resources. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, authz_policy.ListAuthzPoliciesRequest): + request = authz_policy.ListAuthzPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_authz_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListAuthzPoliciesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_authz_policy( + self, + request: Optional[Union[authz_policy.GetAuthzPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> authz_policy.AuthzPolicy: + r"""Gets details of a single AuthzPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetAuthzPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_authz_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetAuthzPolicyRequest, dict]): + The request object. Message for getting a ``AuthzPolicy`` resource. + name (str): + Required. A name of the ``AuthzPolicy`` resource to get. + Must be in the format + ``projects/{project}/locations/{location}/authzPolicies/{authz_policy}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.AuthzPolicy: + AuthzPolicy is a resource that allows to forward traffic to a + callout backend designed to scan the traffic for + security purposes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, authz_policy.GetAuthzPolicyRequest): + request = authz_policy.GetAuthzPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_authz_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_authz_policy( + self, + request: Optional[ + Union[gcn_authz_policy.CreateAuthzPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + authz_policy: Optional[gcn_authz_policy.AuthzPolicy] = None, + authz_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new AuthzPolicy in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + authz_policy = network_security_v1alpha1.AuthzPolicy() + authz_policy.name = "name_value" + authz_policy.target.load_balancing_scheme = "INTERNAL_SELF_MANAGED" + authz_policy.target.resources = ['resources_value1', 'resources_value2'] + authz_policy.action = "CUSTOM" + + request = network_security_v1alpha1.CreateAuthzPolicyRequest( + parent="parent_value", + authz_policy_id="authz_policy_id_value", + authz_policy=authz_policy, + ) + + # Make the request + operation = client.create_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateAuthzPolicyRequest, dict]): + The request object. Message for creating an ``AuthzPolicy`` resource. + parent (str): + Required. The parent resource of the ``AuthzPolicy`` + resource. Must be in the format + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authz_policy (google.cloud.network_security_v1alpha1.types.AuthzPolicy): + Required. ``AuthzPolicy`` resource to be created. + This corresponds to the ``authz_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authz_policy_id (str): + Required. User-provided ID of the ``AuthzPolicy`` + resource to be created. + + This corresponds to the ``authz_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.AuthzPolicy` AuthzPolicy is a resource that allows to forward traffic to a + callout backend designed to scan the traffic for + security purposes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, authz_policy, authz_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcn_authz_policy.CreateAuthzPolicyRequest): + request = gcn_authz_policy.CreateAuthzPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if authz_policy is not None: + request.authz_policy = authz_policy + if authz_policy_id is not None: + request.authz_policy_id = authz_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_authz_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_authz_policy.AuthzPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_authz_policy( + self, + request: Optional[ + Union[gcn_authz_policy.UpdateAuthzPolicyRequest, dict] + ] = None, + *, + authz_policy: Optional[gcn_authz_policy.AuthzPolicy] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single AuthzPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + authz_policy = network_security_v1alpha1.AuthzPolicy() + authz_policy.name = "name_value" + authz_policy.target.load_balancing_scheme = "INTERNAL_SELF_MANAGED" + authz_policy.target.resources = ['resources_value1', 'resources_value2'] + authz_policy.action = "CUSTOM" + + request = network_security_v1alpha1.UpdateAuthzPolicyRequest( + authz_policy=authz_policy, + ) + + # Make the request + operation = client.update_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateAuthzPolicyRequest, dict]): + The request object. Message for updating an ``AuthzPolicy`` resource. + authz_policy (google.cloud.network_security_v1alpha1.types.AuthzPolicy): + Required. ``AuthzPolicy`` resource being updated. + This corresponds to the ``authz_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Used to specify the fields to be overwritten + in the ``AuthzPolicy`` resource by the update. The + fields specified in the ``update_mask`` are relative to + the resource, not the full request. A field is + overwritten if it is in the mask. If the user does not + specify a mask, then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.AuthzPolicy` AuthzPolicy is a resource that allows to forward traffic to a + callout backend designed to scan the traffic for + security purposes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [authz_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
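+ # Illustrative aside: per the docstring above, ``update_mask`` lists the
+ # top-level fields to overwrite; if it is omitted, all fields are
+ # overwritten. A minimal sketch, assuming ``authz_policy`` was built as in
+ # the sample above:
+ #
+ #     from google.protobuf import field_mask_pb2
+ #
+ #     operation = client.update_authz_policy(
+ #         authz_policy=authz_policy,
+ #         update_mask=field_mask_pb2.FieldMask(paths=["action"]),
+ #     )
+ #     operation.result()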
+ if not isinstance(request, gcn_authz_policy.UpdateAuthzPolicyRequest): + request = gcn_authz_policy.UpdateAuthzPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if authz_policy is not None: + request.authz_policy = authz_policy + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_authz_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("authz_policy.name", request.authz_policy.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_authz_policy.AuthzPolicy, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_authz_policy( + self, + request: Optional[Union[authz_policy.DeleteAuthzPolicyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single AuthzPolicy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteAuthzPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteAuthzPolicyRequest, dict]): + The request object. Message for deleting an ``AuthzPolicy`` resource. + name (str): + Required. The name of the ``AuthzPolicy`` resource to + delete. Must be in the format + ``projects/{project}/locations/{location}/authzPolicies/{authz_policy}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, authz_policy.DeleteAuthzPolicyRequest): + request = authz_policy.DeleteAuthzPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_authz_policy] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/pagers.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/pagers.py index 328060dc0fc3..604a11a24c75 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/pagers.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/pagers.py @@ -38,32 +38,1206 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.network_security_v1alpha1.types import client_tls_policy +from google.cloud.network_security_v1alpha1.types import ( + authorization_policy, + authz_policy, + backend_authentication_config, + client_tls_policy, + gateway_security_policy, + gateway_security_policy_rule, + server_tls_policy, + tls_inspection_policy, + url_list, +) + + +class ListAuthorizationPoliciesPager: + """A pager for iterating through ``list_authorization_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``authorization_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAuthorizationPolicies`` requests and continue to iterate + through the ``authorization_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
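+
+ A minimal usage sketch (assuming an existing ``NetworkSecurityClient`` as
+ ``client`` and a hypothetical parent)::
+
+     pager = client.list_authorization_policies(
+         parent="projects/my-project/locations/global",
+     )
+     for policy in pager:
+         print(policy.name)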
+ """ + + def __init__( + self, + method: Callable[..., authorization_policy.ListAuthorizationPoliciesResponse], + request: authorization_policy.ListAuthorizationPoliciesRequest, + response: authorization_policy.ListAuthorizationPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = authorization_policy.ListAuthorizationPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[authorization_policy.ListAuthorizationPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[authorization_policy.AuthorizationPolicy]: + for page in self.pages: + yield from page.authorization_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAuthorizationPoliciesAsyncPager: + """A pager for iterating through ``list_authorization_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``authorization_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAuthorizationPolicies`` requests and continue to iterate + through the ``authorization_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[authorization_policy.ListAuthorizationPoliciesResponse] + ], + request: authorization_policy.ListAuthorizationPoliciesRequest, + response: authorization_policy.ListAuthorizationPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = authorization_policy.ListAuthorizationPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[authorization_policy.ListAuthorizationPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[authorization_policy.AuthorizationPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.authorization_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackendAuthenticationConfigsPager: + """A pager for iterating through ``list_backend_authentication_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backend_authentication_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackendAuthenticationConfigs`` requests and continue to iterate + through the ``backend_authentication_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., backend_authentication_config.ListBackendAuthenticationConfigsResponse + ], + request: backend_authentication_config.ListBackendAuthenticationConfigsRequest, + response: backend_authentication_config.ListBackendAuthenticationConfigsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsRequest): + The initial request object. 
+ response (google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest( + request + ) + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[ + backend_authentication_config.ListBackendAuthenticationConfigsResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__( + self, + ) -> Iterator[backend_authentication_config.BackendAuthenticationConfig]: + for page in self.pages: + yield from page.backend_authentication_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackendAuthenticationConfigsAsyncPager: + """A pager for iterating through ``list_backend_authentication_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backend_authentication_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackendAuthenticationConfigs`` requests and continue to iterate + through the ``backend_authentication_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[ + backend_authentication_config.ListBackendAuthenticationConfigsResponse + ], + ], + request: backend_authentication_config.ListBackendAuthenticationConfigsRequest, + response: backend_authentication_config.ListBackendAuthenticationConfigsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest( + request + ) + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + backend_authentication_config.ListBackendAuthenticationConfigsResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[backend_authentication_config.BackendAuthenticationConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.backend_authentication_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListServerTlsPoliciesPager: + """A pager for iterating through ``list_server_tls_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``server_tls_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListServerTlsPolicies`` requests and continue to iterate + through the ``server_tls_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., server_tls_policy.ListServerTlsPoliciesResponse], + request: server_tls_policy.ListServerTlsPoliciesRequest, + response: server_tls_policy.ListServerTlsPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = server_tls_policy.ListServerTlsPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[server_tls_policy.ListServerTlsPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[server_tls_policy.ServerTlsPolicy]: + for page in self.pages: + yield from page.server_tls_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListServerTlsPoliciesAsyncPager: + """A pager for iterating through ``list_server_tls_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``server_tls_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListServerTlsPolicies`` requests and continue to iterate + through the ``server_tls_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[server_tls_policy.ListServerTlsPoliciesResponse] + ], + request: server_tls_policy.ListServerTlsPoliciesRequest, + response: server_tls_policy.ListServerTlsPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = server_tls_policy.ListServerTlsPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[server_tls_policy.ListServerTlsPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[server_tls_policy.ServerTlsPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.server_tls_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListClientTlsPoliciesPager: + """A pager for iterating through ``list_client_tls_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``client_tls_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListClientTlsPolicies`` requests and continue to iterate + through the ``client_tls_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., client_tls_policy.ListClientTlsPoliciesResponse], + request: client_tls_policy.ListClientTlsPoliciesRequest, + response: client_tls_policy.ListClientTlsPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = client_tls_policy.ListClientTlsPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[client_tls_policy.ListClientTlsPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[client_tls_policy.ClientTlsPolicy]: + for page in self.pages: + yield from page.client_tls_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListClientTlsPoliciesAsyncPager: + """A pager for iterating through ``list_client_tls_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``client_tls_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListClientTlsPolicies`` requests and continue to iterate + through the ``client_tls_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[client_tls_policy.ListClientTlsPoliciesResponse] + ], + request: client_tls_policy.ListClientTlsPoliciesRequest, + response: client_tls_policy.ListClientTlsPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = client_tls_policy.ListClientTlsPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[client_tls_policy.ListClientTlsPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[client_tls_policy.ClientTlsPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.client_tls_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGatewaySecurityPoliciesPager: + """A pager for iterating through ``list_gateway_security_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``gateway_security_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGatewaySecurityPolicies`` requests and continue to iterate + through the ``gateway_security_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., gateway_security_policy.ListGatewaySecurityPoliciesResponse + ], + request: gateway_security_policy.ListGatewaySecurityPoliciesRequest, + response: gateway_security_policy.ListGatewaySecurityPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = gateway_security_policy.ListGatewaySecurityPoliciesRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[gateway_security_policy.ListGatewaySecurityPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[gateway_security_policy.GatewaySecurityPolicy]: + for page in self.pages: + yield from page.gateway_security_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGatewaySecurityPoliciesAsyncPager: + """A pager for iterating through ``list_gateway_security_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``gateway_security_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGatewaySecurityPolicies`` requests and continue to iterate + through the ``gateway_security_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[gateway_security_policy.ListGatewaySecurityPoliciesResponse] + ], + request: gateway_security_policy.ListGatewaySecurityPoliciesRequest, + response: gateway_security_policy.ListGatewaySecurityPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = gateway_security_policy.ListGatewaySecurityPoliciesRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[gateway_security_policy.ListGatewaySecurityPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[gateway_security_policy.GatewaySecurityPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.gateway_security_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGatewaySecurityPolicyRulesPager: + """A pager for iterating through ``list_gateway_security_policy_rules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``gateway_security_policy_rules`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGatewaySecurityPolicyRules`` requests and continue to iterate + through the ``gateway_security_policy_rules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse + ], + request: gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, + response: gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest(request) + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__( + self, + ) -> Iterator[gateway_security_policy_rule.GatewaySecurityPolicyRule]: + for page in self.pages: + yield from page.gateway_security_policy_rules + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGatewaySecurityPolicyRulesAsyncPager: + """A pager for iterating through ``list_gateway_security_policy_rules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``gateway_security_policy_rules`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGatewaySecurityPolicyRules`` requests and continue to iterate + through the ``gateway_security_policy_rules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[ + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse + ], + ], + request: gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, + response: gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest(request) + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[gateway_security_policy_rule.GatewaySecurityPolicyRule]: + async def async_generator(): + async for page in self.pages: + for response in page.gateway_security_policy_rules: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUrlListsPager: + """A pager for iterating through ``list_url_lists`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListUrlListsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``url_lists`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUrlLists`` requests and continue to iterate + through the ``url_lists`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListUrlListsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., url_list.ListUrlListsResponse], + request: url_list.ListUrlListsRequest, + response: url_list.ListUrlListsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListUrlListsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListUrlListsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = url_list.ListUrlListsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + @property + def pages(self) -> Iterator[url_list.ListUrlListsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response -class ListClientTlsPoliciesPager: - """A pager for iterating through ``list_client_tls_policies`` requests. + def __iter__(self) -> Iterator[url_list.UrlList]: + for page in self.pages: + yield from page.url_lists + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUrlListsAsyncPager: + """A pager for iterating through ``list_url_lists`` requests. This class thinly wraps an initial - :class:`google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse` object, and + :class:`google.cloud.network_security_v1alpha1.types.ListUrlListsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``url_lists`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListUrlLists`` requests and continue to iterate + through the ``url_lists`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListUrlListsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[url_list.ListUrlListsResponse]], + request: url_list.ListUrlListsRequest, + response: url_list.ListUrlListsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListUrlListsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListUrlListsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = url_list.ListUrlListsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[url_list.ListUrlListsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[url_list.UrlList]: + async def async_generator(): + async for page in self.pages: + for response in page.url_lists: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTlsInspectionPoliciesPager: + """A pager for iterating through ``list_tls_inspection_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesResponse` object, and provides an ``__iter__`` method to iterate through its - ``client_tls_policies`` field. + ``tls_inspection_policies`` field. If there are more pages, the ``__iter__`` method will make additional - ``ListClientTlsPolicies`` requests and continue to iterate - through the ``client_tls_policies`` field on the + ``ListTlsInspectionPolicies`` requests and continue to iterate + through the ``tls_inspection_policies`` field on the corresponding responses. - All the usual :class:`google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse` + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., client_tls_policy.ListClientTlsPoliciesResponse], - request: client_tls_policy.ListClientTlsPoliciesRequest, - response: client_tls_policy.ListClientTlsPoliciesResponse, + method: Callable[..., tls_inspection_policy.ListTlsInspectionPoliciesResponse], + request: tls_inspection_policy.ListTlsInspectionPoliciesRequest, + response: tls_inspection_policy.ListTlsInspectionPoliciesResponse, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -74,9 +1248,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest): + request (google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesRequest): The initial request object. - response (google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse): + response (google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesResponse): The initial response object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -87,7 +1261,7 @@ def __init__( be of type `bytes`. 
""" self._method = method - self._request = client_tls_policy.ListClientTlsPoliciesRequest(request) + self._request = tls_inspection_policy.ListTlsInspectionPoliciesRequest(request) self._response = response self._retry = retry self._timeout = timeout @@ -97,7 +1271,9 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[client_tls_policy.ListClientTlsPoliciesResponse]: + def pages( + self, + ) -> Iterator[tls_inspection_policy.ListTlsInspectionPoliciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -109,28 +1285,28 @@ def pages(self) -> Iterator[client_tls_policy.ListClientTlsPoliciesResponse]: ) yield self._response - def __iter__(self) -> Iterator[client_tls_policy.ClientTlsPolicy]: + def __iter__(self) -> Iterator[tls_inspection_policy.TlsInspectionPolicy]: for page in self.pages: - yield from page.client_tls_policies + yield from page.tls_inspection_policies def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListClientTlsPoliciesAsyncPager: - """A pager for iterating through ``list_client_tls_policies`` requests. +class ListTlsInspectionPoliciesAsyncPager: + """A pager for iterating through ``list_tls_inspection_policies`` requests. This class thinly wraps an initial - :class:`google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse` object, and + :class:`google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesResponse` object, and provides an ``__aiter__`` method to iterate through its - ``client_tls_policies`` field. + ``tls_inspection_policies`` field. If there are more pages, the ``__aiter__`` method will make additional - ``ListClientTlsPolicies`` requests and continue to iterate - through the ``client_tls_policies`` field on the + ``ListTlsInspectionPolicies`` requests and continue to iterate + through the ``tls_inspection_policies`` field on the corresponding responses. - All the usual :class:`google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse` + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -138,10 +1314,10 @@ class ListClientTlsPoliciesAsyncPager: def __init__( self, method: Callable[ - ..., Awaitable[client_tls_policy.ListClientTlsPoliciesResponse] + ..., Awaitable[tls_inspection_policy.ListTlsInspectionPoliciesResponse] ], - request: client_tls_policy.ListClientTlsPoliciesRequest, - response: client_tls_policy.ListClientTlsPoliciesResponse, + request: tls_inspection_policy.ListTlsInspectionPoliciesRequest, + response: tls_inspection_policy.ListTlsInspectionPoliciesResponse, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -152,9 +1328,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest): + request (google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesRequest): The initial request object. 
- response (google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesResponse): + response (google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesResponse): The initial response object. retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. @@ -165,7 +1341,7 @@ def __init__( be of type `bytes`. """ self._method = method - self._request = client_tls_policy.ListClientTlsPoliciesRequest(request) + self._request = tls_inspection_policy.ListTlsInspectionPoliciesRequest(request) self._response = response self._retry = retry self._timeout = timeout @@ -177,7 +1353,7 @@ def __getattr__(self, name: str) -> Any: @property async def pages( self, - ) -> AsyncIterator[client_tls_policy.ListClientTlsPoliciesResponse]: + ) -> AsyncIterator[tls_inspection_policy.ListTlsInspectionPoliciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -189,10 +1365,166 @@ async def pages( ) yield self._response - def __aiter__(self) -> AsyncIterator[client_tls_policy.ClientTlsPolicy]: + def __aiter__(self) -> AsyncIterator[tls_inspection_policy.TlsInspectionPolicy]: async def async_generator(): async for page in self.pages: - for response in page.client_tls_policies: + for response in page.tls_inspection_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAuthzPoliciesPager: + """A pager for iterating through ``list_authz_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``authz_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAuthzPolicies`` requests and continue to iterate + through the ``authz_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., authz_policy.ListAuthzPoliciesResponse], + request: authz_policy.ListAuthzPoliciesRequest, + response: authz_policy.ListAuthzPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = authz_policy.ListAuthzPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[authz_policy.ListAuthzPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[authz_policy.AuthzPolicy]: + for page in self.pages: + yield from page.authz_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAuthzPoliciesAsyncPager: + """A pager for iterating through ``list_authz_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``authz_policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAuthzPolicies`` requests and continue to iterate + through the ``authz_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[authz_policy.ListAuthzPoliciesResponse]], + request: authz_policy.ListAuthzPoliciesRequest, + response: authz_policy.ListAuthzPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = authz_policy.ListAuthzPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[authz_policy.ListAuthzPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[authz_policy.AuthzPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.authz_policies: yield response return async_generator() diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/base.py index 4aa897b9972e..36fa10f54f6c 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/base.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/base.py @@ -30,10 +30,40 @@ import google.protobuf from google.cloud.network_security_v1alpha1 import gapic_version as package_version +from google.cloud.network_security_v1alpha1.types import ( + authorization_policy as gcn_authorization_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + authz_policy as gcn_authz_policy, +) +from google.cloud.network_security_v1alpha1.types import backend_authentication_config +from google.cloud.network_security_v1alpha1.types import ( + backend_authentication_config as gcn_backend_authentication_config, +) from google.cloud.network_security_v1alpha1.types import ( client_tls_policy as gcn_client_tls_policy, ) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy as gcn_gateway_security_policy, +) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy_rule +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy_rule as gcn_gateway_security_policy_rule, +) +from google.cloud.network_security_v1alpha1.types import ( + server_tls_policy as gcn_server_tls_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + tls_inspection_policy as gcn_tls_inspection_policy, +) +from google.cloud.network_security_v1alpha1.types import url_list as gcn_url_list +from google.cloud.network_security_v1alpha1.types import authorization_policy +from google.cloud.network_security_v1alpha1.types import authz_policy from google.cloud.network_security_v1alpha1.types import client_tls_policy +from google.cloud.network_security_v1alpha1.types import server_tls_policy +from google.cloud.network_security_v1alpha1.types import tls_inspection_policy +from google.cloud.network_security_v1alpha1.types import url_list DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -140,6 +170,81 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { + self.list_authorization_policies: gapic_v1.method.wrap_method( + self.list_authorization_policies, + default_timeout=None, + client_info=client_info, + ), + self.get_authorization_policy: gapic_v1.method.wrap_method( + self.get_authorization_policy, + default_timeout=None, + client_info=client_info, + ), + self.create_authorization_policy: gapic_v1.method.wrap_method( + self.create_authorization_policy, + default_timeout=None, + client_info=client_info, + ), + self.update_authorization_policy: gapic_v1.method.wrap_method( + self.update_authorization_policy, + default_timeout=None, + client_info=client_info, + ), + self.delete_authorization_policy: gapic_v1.method.wrap_method( + self.delete_authorization_policy, + default_timeout=None, + client_info=client_info, + ), + self.list_backend_authentication_configs: gapic_v1.method.wrap_method( + self.list_backend_authentication_configs, + default_timeout=None, + client_info=client_info, + ), + self.get_backend_authentication_config: gapic_v1.method.wrap_method( + self.get_backend_authentication_config, + default_timeout=None, + client_info=client_info, + ), + self.create_backend_authentication_config: gapic_v1.method.wrap_method( + self.create_backend_authentication_config, + default_timeout=None, + client_info=client_info, + ), + self.update_backend_authentication_config: gapic_v1.method.wrap_method( + self.update_backend_authentication_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_backend_authentication_config: gapic_v1.method.wrap_method( + self.delete_backend_authentication_config, + default_timeout=None, + client_info=client_info, + ), + self.list_server_tls_policies: gapic_v1.method.wrap_method( + self.list_server_tls_policies, + default_timeout=None, + client_info=client_info, + ), + self.get_server_tls_policy: gapic_v1.method.wrap_method( + self.get_server_tls_policy, + default_timeout=None, + client_info=client_info, + ), + self.create_server_tls_policy: gapic_v1.method.wrap_method( + self.create_server_tls_policy, + default_timeout=None, + client_info=client_info, + ), + self.update_server_tls_policy: gapic_v1.method.wrap_method( + self.update_server_tls_policy, + default_timeout=None, + client_info=client_info, + ), + self.delete_server_tls_policy: gapic_v1.method.wrap_method( + self.delete_server_tls_policy, + default_timeout=None, + client_info=client_info, + ), self.list_client_tls_policies: gapic_v1.method.wrap_method( self.list_client_tls_policies, default_timeout=None, @@ -165,6 +270,131 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_gateway_security_policies: gapic_v1.method.wrap_method( + self.list_gateway_security_policies, + default_timeout=None, + client_info=client_info, + ), + self.get_gateway_security_policy: gapic_v1.method.wrap_method( + self.get_gateway_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.create_gateway_security_policy: gapic_v1.method.wrap_method( + self.create_gateway_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.update_gateway_security_policy: gapic_v1.method.wrap_method( + self.update_gateway_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.delete_gateway_security_policy: gapic_v1.method.wrap_method( + self.delete_gateway_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.list_gateway_security_policy_rules: gapic_v1.method.wrap_method( + 
self.list_gateway_security_policy_rules, + default_timeout=None, + client_info=client_info, + ), + self.get_gateway_security_policy_rule: gapic_v1.method.wrap_method( + self.get_gateway_security_policy_rule, + default_timeout=None, + client_info=client_info, + ), + self.create_gateway_security_policy_rule: gapic_v1.method.wrap_method( + self.create_gateway_security_policy_rule, + default_timeout=None, + client_info=client_info, + ), + self.update_gateway_security_policy_rule: gapic_v1.method.wrap_method( + self.update_gateway_security_policy_rule, + default_timeout=None, + client_info=client_info, + ), + self.delete_gateway_security_policy_rule: gapic_v1.method.wrap_method( + self.delete_gateway_security_policy_rule, + default_timeout=None, + client_info=client_info, + ), + self.list_url_lists: gapic_v1.method.wrap_method( + self.list_url_lists, + default_timeout=None, + client_info=client_info, + ), + self.get_url_list: gapic_v1.method.wrap_method( + self.get_url_list, + default_timeout=None, + client_info=client_info, + ), + self.create_url_list: gapic_v1.method.wrap_method( + self.create_url_list, + default_timeout=None, + client_info=client_info, + ), + self.update_url_list: gapic_v1.method.wrap_method( + self.update_url_list, + default_timeout=None, + client_info=client_info, + ), + self.delete_url_list: gapic_v1.method.wrap_method( + self.delete_url_list, + default_timeout=None, + client_info=client_info, + ), + self.list_tls_inspection_policies: gapic_v1.method.wrap_method( + self.list_tls_inspection_policies, + default_timeout=None, + client_info=client_info, + ), + self.get_tls_inspection_policy: gapic_v1.method.wrap_method( + self.get_tls_inspection_policy, + default_timeout=None, + client_info=client_info, + ), + self.create_tls_inspection_policy: gapic_v1.method.wrap_method( + self.create_tls_inspection_policy, + default_timeout=None, + client_info=client_info, + ), + self.update_tls_inspection_policy: gapic_v1.method.wrap_method( + self.update_tls_inspection_policy, + default_timeout=None, + client_info=client_info, + ), + self.delete_tls_inspection_policy: gapic_v1.method.wrap_method( + self.delete_tls_inspection_policy, + default_timeout=None, + client_info=client_info, + ), + self.list_authz_policies: gapic_v1.method.wrap_method( + self.list_authz_policies, + default_timeout=None, + client_info=client_info, + ), + self.get_authz_policy: gapic_v1.method.wrap_method( + self.get_authz_policy, + default_timeout=None, + client_info=client_info, + ), + self.create_authz_policy: gapic_v1.method.wrap_method( + self.create_authz_policy, + default_timeout=None, + client_info=client_info, + ), + self.update_authz_policy: gapic_v1.method.wrap_method( + self.update_authz_policy, + default_timeout=None, + client_info=client_info, + ), + self.delete_authz_policy: gapic_v1.method.wrap_method( + self.delete_authz_policy, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -226,6 +456,161 @@ def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() + @property + def list_authorization_policies( + self, + ) -> Callable[ + [authorization_policy.ListAuthorizationPoliciesRequest], + Union[ + authorization_policy.ListAuthorizationPoliciesResponse, + Awaitable[authorization_policy.ListAuthorizationPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_authorization_policy( + self, + ) -> Callable[ + 
[authorization_policy.GetAuthorizationPolicyRequest], + Union[ + authorization_policy.AuthorizationPolicy, + Awaitable[authorization_policy.AuthorizationPolicy], + ], + ]: + raise NotImplementedError() + + @property + def create_authorization_policy( + self, + ) -> Callable[ + [gcn_authorization_policy.CreateAuthorizationPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_authorization_policy( + self, + ) -> Callable[ + [gcn_authorization_policy.UpdateAuthorizationPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_authorization_policy( + self, + ) -> Callable[ + [authorization_policy.DeleteAuthorizationPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backend_authentication_configs( + self, + ) -> Callable[ + [backend_authentication_config.ListBackendAuthenticationConfigsRequest], + Union[ + backend_authentication_config.ListBackendAuthenticationConfigsResponse, + Awaitable[ + backend_authentication_config.ListBackendAuthenticationConfigsResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def get_backend_authentication_config( + self, + ) -> Callable[ + [backend_authentication_config.GetBackendAuthenticationConfigRequest], + Union[ + backend_authentication_config.BackendAuthenticationConfig, + Awaitable[backend_authentication_config.BackendAuthenticationConfig], + ], + ]: + raise NotImplementedError() + + @property + def create_backend_authentication_config( + self, + ) -> Callable[ + [gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_backend_authentication_config( + self, + ) -> Callable[ + [gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backend_authentication_config( + self, + ) -> Callable[ + [backend_authentication_config.DeleteBackendAuthenticationConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_server_tls_policies( + self, + ) -> Callable[ + [server_tls_policy.ListServerTlsPoliciesRequest], + Union[ + server_tls_policy.ListServerTlsPoliciesResponse, + Awaitable[server_tls_policy.ListServerTlsPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_server_tls_policy( + self, + ) -> Callable[ + [server_tls_policy.GetServerTlsPolicyRequest], + Union[ + server_tls_policy.ServerTlsPolicy, + Awaitable[server_tls_policy.ServerTlsPolicy], + ], + ]: + raise NotImplementedError() + + @property + def create_server_tls_policy( + self, + ) -> Callable[ + [gcn_server_tls_policy.CreateServerTlsPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_server_tls_policy( + self, + ) -> Callable[ + [gcn_server_tls_policy.UpdateServerTlsPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_server_tls_policy( + self, + ) -> Callable[ + 
[server_tls_policy.DeleteServerTlsPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_client_tls_policies( self, @@ -277,6 +662,254 @@ def delete_client_tls_policy( ]: raise NotImplementedError() + @property + def list_gateway_security_policies( + self, + ) -> Callable[ + [gateway_security_policy.ListGatewaySecurityPoliciesRequest], + Union[ + gateway_security_policy.ListGatewaySecurityPoliciesResponse, + Awaitable[gateway_security_policy.ListGatewaySecurityPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_gateway_security_policy( + self, + ) -> Callable[ + [gateway_security_policy.GetGatewaySecurityPolicyRequest], + Union[ + gateway_security_policy.GatewaySecurityPolicy, + Awaitable[gateway_security_policy.GatewaySecurityPolicy], + ], + ]: + raise NotImplementedError() + + @property + def create_gateway_security_policy( + self, + ) -> Callable[ + [gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_gateway_security_policy( + self, + ) -> Callable[ + [gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_gateway_security_policy( + self, + ) -> Callable[ + [gateway_security_policy.DeleteGatewaySecurityPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_gateway_security_policy_rules( + self, + ) -> Callable[ + [gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest], + Union[ + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse, + Awaitable[ + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def get_gateway_security_policy_rule( + self, + ) -> Callable[ + [gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest], + Union[ + gateway_security_policy_rule.GatewaySecurityPolicyRule, + Awaitable[gateway_security_policy_rule.GatewaySecurityPolicyRule], + ], + ]: + raise NotImplementedError() + + @property + def create_gateway_security_policy_rule( + self, + ) -> Callable[ + [gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_gateway_security_policy_rule( + self, + ) -> Callable[ + [gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_gateway_security_policy_rule( + self, + ) -> Callable[ + [gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_url_lists( + self, + ) -> Callable[ + [url_list.ListUrlListsRequest], + Union[url_list.ListUrlListsResponse, Awaitable[url_list.ListUrlListsResponse]], + ]: + raise NotImplementedError() + + @property + def get_url_list( + self, + ) -> Callable[ + [url_list.GetUrlListRequest], + Union[url_list.UrlList, Awaitable[url_list.UrlList]], + ]: + raise NotImplementedError() + + @property + def 
create_url_list( + self, + ) -> Callable[ + [gcn_url_list.CreateUrlListRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_url_list( + self, + ) -> Callable[ + [gcn_url_list.UpdateUrlListRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_url_list( + self, + ) -> Callable[ + [url_list.DeleteUrlListRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_tls_inspection_policies( + self, + ) -> Callable[ + [tls_inspection_policy.ListTlsInspectionPoliciesRequest], + Union[ + tls_inspection_policy.ListTlsInspectionPoliciesResponse, + Awaitable[tls_inspection_policy.ListTlsInspectionPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_tls_inspection_policy( + self, + ) -> Callable[ + [tls_inspection_policy.GetTlsInspectionPolicyRequest], + Union[ + tls_inspection_policy.TlsInspectionPolicy, + Awaitable[tls_inspection_policy.TlsInspectionPolicy], + ], + ]: + raise NotImplementedError() + + @property + def create_tls_inspection_policy( + self, + ) -> Callable[ + [gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_tls_inspection_policy( + self, + ) -> Callable[ + [gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_tls_inspection_policy( + self, + ) -> Callable[ + [tls_inspection_policy.DeleteTlsInspectionPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_authz_policies( + self, + ) -> Callable[ + [authz_policy.ListAuthzPoliciesRequest], + Union[ + authz_policy.ListAuthzPoliciesResponse, + Awaitable[authz_policy.ListAuthzPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_authz_policy( + self, + ) -> Callable[ + [authz_policy.GetAuthzPolicyRequest], + Union[authz_policy.AuthzPolicy, Awaitable[authz_policy.AuthzPolicy]], + ]: + raise NotImplementedError() + + @property + def create_authz_policy( + self, + ) -> Callable[ + [gcn_authz_policy.CreateAuthzPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_authz_policy( + self, + ) -> Callable[ + [gcn_authz_policy.UpdateAuthzPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_authz_policy( + self, + ) -> Callable[ + [authz_policy.DeleteAuthzPolicyRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/grpc.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/grpc.py index 569237710faa..3e70d7dd0165 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/grpc.py +++ 
b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/grpc.py @@ -32,10 +32,40 @@ import grpc # type: ignore import proto # type: ignore +from google.cloud.network_security_v1alpha1.types import ( + authorization_policy as gcn_authorization_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + authz_policy as gcn_authz_policy, +) +from google.cloud.network_security_v1alpha1.types import backend_authentication_config +from google.cloud.network_security_v1alpha1.types import ( + backend_authentication_config as gcn_backend_authentication_config, +) from google.cloud.network_security_v1alpha1.types import ( client_tls_policy as gcn_client_tls_policy, ) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy as gcn_gateway_security_policy, +) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy_rule +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy_rule as gcn_gateway_security_policy_rule, +) +from google.cloud.network_security_v1alpha1.types import ( + server_tls_policy as gcn_server_tls_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + tls_inspection_policy as gcn_tls_inspection_policy, +) +from google.cloud.network_security_v1alpha1.types import url_list as gcn_url_list +from google.cloud.network_security_v1alpha1.types import authorization_policy +from google.cloud.network_security_v1alpha1.types import authz_policy from google.cloud.network_security_v1alpha1.types import client_tls_policy +from google.cloud.network_security_v1alpha1.types import server_tls_policy +from google.cloud.network_security_v1alpha1.types import tls_inspection_policy +from google.cloud.network_security_v1alpha1.types import url_list from .base import DEFAULT_CLIENT_INFO, NetworkSecurityTransport @@ -348,20 +378,20 @@ def operations_client(self) -> operations_v1.OperationsClient: return self._operations_client @property - def list_client_tls_policies( + def list_authorization_policies( self, ) -> Callable[ - [client_tls_policy.ListClientTlsPoliciesRequest], - client_tls_policy.ListClientTlsPoliciesResponse, + [authorization_policy.ListAuthorizationPoliciesRequest], + authorization_policy.ListAuthorizationPoliciesResponse, ]: - r"""Return a callable for the list client tls policies method over gRPC. + r"""Return a callable for the list authorization policies method over gRPC. - Lists ClientTlsPolicies in a given project and + Lists AuthorizationPolicies in a given project and location. Returns: - Callable[[~.ListClientTlsPoliciesRequest], - ~.ListClientTlsPoliciesResponse]: + Callable[[~.ListAuthorizationPoliciesRequest], + ~.ListAuthorizationPoliciesResponse]: A function that, when called, will call the underlying RPC on the server. """ @@ -369,27 +399,30 @@ def list_client_tls_policies( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "list_client_tls_policies" not in self._stubs: - self._stubs["list_client_tls_policies"] = self._logged_channel.unary_unary( - "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListClientTlsPolicies", - request_serializer=client_tls_policy.ListClientTlsPoliciesRequest.serialize, - response_deserializer=client_tls_policy.ListClientTlsPoliciesResponse.deserialize, + if "list_authorization_policies" not in self._stubs: + self._stubs[ + "list_authorization_policies" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListAuthorizationPolicies", + request_serializer=authorization_policy.ListAuthorizationPoliciesRequest.serialize, + response_deserializer=authorization_policy.ListAuthorizationPoliciesResponse.deserialize, ) - return self._stubs["list_client_tls_policies"] + return self._stubs["list_authorization_policies"] @property - def get_client_tls_policy( + def get_authorization_policy( self, ) -> Callable[ - [client_tls_policy.GetClientTlsPolicyRequest], client_tls_policy.ClientTlsPolicy + [authorization_policy.GetAuthorizationPolicyRequest], + authorization_policy.AuthorizationPolicy, ]: - r"""Return a callable for the get client tls policy method over gRPC. + r"""Return a callable for the get authorization policy method over gRPC. - Gets details of a single ClientTlsPolicy. + Gets details of a single AuthorizationPolicy. Returns: - Callable[[~.GetClientTlsPolicyRequest], - ~.ClientTlsPolicy]: + Callable[[~.GetAuthorizationPolicyRequest], + ~.AuthorizationPolicy]: A function that, when called, will call the underlying RPC on the server. """ @@ -397,27 +430,346 @@ def get_client_tls_policy( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "get_client_tls_policy" not in self._stubs: - self._stubs["get_client_tls_policy"] = self._logged_channel.unary_unary( - "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetClientTlsPolicy", - request_serializer=client_tls_policy.GetClientTlsPolicyRequest.serialize, - response_deserializer=client_tls_policy.ClientTlsPolicy.deserialize, + if "get_authorization_policy" not in self._stubs: + self._stubs["get_authorization_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetAuthorizationPolicy", + request_serializer=authorization_policy.GetAuthorizationPolicyRequest.serialize, + response_deserializer=authorization_policy.AuthorizationPolicy.deserialize, ) - return self._stubs["get_client_tls_policy"] + return self._stubs["get_authorization_policy"] @property - def create_client_tls_policy( + def create_authorization_policy( self, ) -> Callable[ - [gcn_client_tls_policy.CreateClientTlsPolicyRequest], operations_pb2.Operation + [gcn_authorization_policy.CreateAuthorizationPolicyRequest], + operations_pb2.Operation, ]: - r"""Return a callable for the create client tls policy method over gRPC. + r"""Return a callable for the create authorization policy method over gRPC. - Creates a new ClientTlsPolicy in a given project and + Creates a new AuthorizationPolicy in a given project + and location. + + Returns: + Callable[[~.CreateAuthorizationPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_authorization_policy" not in self._stubs: + self._stubs[ + "create_authorization_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateAuthorizationPolicy", + request_serializer=gcn_authorization_policy.CreateAuthorizationPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_authorization_policy"] + + @property + def update_authorization_policy( + self, + ) -> Callable[ + [gcn_authorization_policy.UpdateAuthorizationPolicyRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update authorization policy method over gRPC. + + Updates the parameters of a single + AuthorizationPolicy. + + Returns: + Callable[[~.UpdateAuthorizationPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_authorization_policy" not in self._stubs: + self._stubs[ + "update_authorization_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateAuthorizationPolicy", + request_serializer=gcn_authorization_policy.UpdateAuthorizationPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_authorization_policy"] + + @property + def delete_authorization_policy( + self, + ) -> Callable[ + [authorization_policy.DeleteAuthorizationPolicyRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete authorization policy method over gRPC. + + Deletes a single AuthorizationPolicy. + + Returns: + Callable[[~.DeleteAuthorizationPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_authorization_policy" not in self._stubs: + self._stubs[ + "delete_authorization_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteAuthorizationPolicy", + request_serializer=authorization_policy.DeleteAuthorizationPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_authorization_policy"] + + @property + def list_backend_authentication_configs( + self, + ) -> Callable[ + [backend_authentication_config.ListBackendAuthenticationConfigsRequest], + backend_authentication_config.ListBackendAuthenticationConfigsResponse, + ]: + r"""Return a callable for the list backend authentication + configs method over gRPC. + + Lists BackendAuthenticationConfigs in a given project + and location. + + Returns: + Callable[[~.ListBackendAuthenticationConfigsRequest], + ~.ListBackendAuthenticationConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backend_authentication_configs" not in self._stubs: + self._stubs[ + "list_backend_authentication_configs" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListBackendAuthenticationConfigs", + request_serializer=backend_authentication_config.ListBackendAuthenticationConfigsRequest.serialize, + response_deserializer=backend_authentication_config.ListBackendAuthenticationConfigsResponse.deserialize, + ) + return self._stubs["list_backend_authentication_configs"] + + @property + def get_backend_authentication_config( + self, + ) -> Callable[ + [backend_authentication_config.GetBackendAuthenticationConfigRequest], + backend_authentication_config.BackendAuthenticationConfig, + ]: + r"""Return a callable for the get backend authentication + config method over gRPC. + + Gets details of a single BackendAuthenticationConfig + to BackendAuthenticationConfig. + + Returns: + Callable[[~.GetBackendAuthenticationConfigRequest], + ~.BackendAuthenticationConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backend_authentication_config" not in self._stubs: + self._stubs[ + "get_backend_authentication_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetBackendAuthenticationConfig", + request_serializer=backend_authentication_config.GetBackendAuthenticationConfigRequest.serialize, + response_deserializer=backend_authentication_config.BackendAuthenticationConfig.deserialize, + ) + return self._stubs["get_backend_authentication_config"] + + @property + def create_backend_authentication_config( + self, + ) -> Callable[ + [gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create backend authentication + config method over gRPC. + + Creates a new BackendAuthenticationConfig in a given + project and location. + + Returns: + Callable[[~.CreateBackendAuthenticationConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backend_authentication_config" not in self._stubs: + self._stubs[ + "create_backend_authentication_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateBackendAuthenticationConfig", + request_serializer=gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backend_authentication_config"] + + @property + def update_backend_authentication_config( + self, + ) -> Callable[ + [gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update backend authentication + config method over gRPC. + + Updates the parameters of a single + BackendAuthenticationConfig to + BackendAuthenticationConfig. 
+ + Returns: + Callable[[~.UpdateBackendAuthenticationConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backend_authentication_config" not in self._stubs: + self._stubs[ + "update_backend_authentication_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateBackendAuthenticationConfig", + request_serializer=gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backend_authentication_config"] + + @property + def delete_backend_authentication_config( + self, + ) -> Callable[ + [backend_authentication_config.DeleteBackendAuthenticationConfigRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete backend authentication + config method over gRPC. + + Deletes a single BackendAuthenticationConfig to + BackendAuthenticationConfig. + + Returns: + Callable[[~.DeleteBackendAuthenticationConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backend_authentication_config" not in self._stubs: + self._stubs[ + "delete_backend_authentication_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteBackendAuthenticationConfig", + request_serializer=backend_authentication_config.DeleteBackendAuthenticationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backend_authentication_config"] + + @property + def list_server_tls_policies( + self, + ) -> Callable[ + [server_tls_policy.ListServerTlsPoliciesRequest], + server_tls_policy.ListServerTlsPoliciesResponse, + ]: + r"""Return a callable for the list server tls policies method over gRPC. + + Lists ServerTlsPolicies in a given project and location. Returns: - Callable[[~.CreateClientTlsPolicyRequest], + Callable[[~.ListServerTlsPoliciesRequest], + ~.ListServerTlsPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_server_tls_policies" not in self._stubs: + self._stubs["list_server_tls_policies"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListServerTlsPolicies", + request_serializer=server_tls_policy.ListServerTlsPoliciesRequest.serialize, + response_deserializer=server_tls_policy.ListServerTlsPoliciesResponse.deserialize, + ) + return self._stubs["list_server_tls_policies"] + + @property + def get_server_tls_policy( + self, + ) -> Callable[ + [server_tls_policy.GetServerTlsPolicyRequest], server_tls_policy.ServerTlsPolicy + ]: + r"""Return a callable for the get server tls policy method over gRPC. + + Gets details of a single ServerTlsPolicy. 
+ + Returns: + Callable[[~.GetServerTlsPolicyRequest], + ~.ServerTlsPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_server_tls_policy" not in self._stubs: + self._stubs["get_server_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetServerTlsPolicy", + request_serializer=server_tls_policy.GetServerTlsPolicyRequest.serialize, + response_deserializer=server_tls_policy.ServerTlsPolicy.deserialize, + ) + return self._stubs["get_server_tls_policy"] + + @property + def create_server_tls_policy( + self, + ) -> Callable[ + [gcn_server_tls_policy.CreateServerTlsPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create server tls policy method over gRPC. + + Creates a new ServerTlsPolicy in a given project and + location. + + Returns: + Callable[[~.CreateServerTlsPolicyRequest], ~.Operation]: A function that, when called, will call the underlying RPC on the server. @@ -426,26 +778,26 @@ def create_client_tls_policy( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "create_client_tls_policy" not in self._stubs: - self._stubs["create_client_tls_policy"] = self._logged_channel.unary_unary( - "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateClientTlsPolicy", - request_serializer=gcn_client_tls_policy.CreateClientTlsPolicyRequest.serialize, + if "create_server_tls_policy" not in self._stubs: + self._stubs["create_server_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateServerTlsPolicy", + request_serializer=gcn_server_tls_policy.CreateServerTlsPolicyRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["create_client_tls_policy"] + return self._stubs["create_server_tls_policy"] @property - def update_client_tls_policy( + def update_server_tls_policy( self, ) -> Callable[ - [gcn_client_tls_policy.UpdateClientTlsPolicyRequest], operations_pb2.Operation + [gcn_server_tls_policy.UpdateServerTlsPolicyRequest], operations_pb2.Operation ]: - r"""Return a callable for the update client tls policy method over gRPC. + r"""Return a callable for the update server tls policy method over gRPC. - Updates the parameters of a single ClientTlsPolicy. + Updates the parameters of a single ServerTlsPolicy. Returns: - Callable[[~.UpdateClientTlsPolicyRequest], + Callable[[~.UpdateServerTlsPolicyRequest], ~.Operation]: A function that, when called, will call the underlying RPC on the server. @@ -454,26 +806,26 @@ def update_client_tls_policy( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "update_client_tls_policy" not in self._stubs: - self._stubs["update_client_tls_policy"] = self._logged_channel.unary_unary( - "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateClientTlsPolicy", - request_serializer=gcn_client_tls_policy.UpdateClientTlsPolicyRequest.serialize, + if "update_server_tls_policy" not in self._stubs: + self._stubs["update_server_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateServerTlsPolicy", + request_serializer=gcn_server_tls_policy.UpdateServerTlsPolicyRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["update_client_tls_policy"] + return self._stubs["update_server_tls_policy"] @property - def delete_client_tls_policy( + def delete_server_tls_policy( self, ) -> Callable[ - [client_tls_policy.DeleteClientTlsPolicyRequest], operations_pb2.Operation + [server_tls_policy.DeleteServerTlsPolicyRequest], operations_pb2.Operation ]: - r"""Return a callable for the delete client tls policy method over gRPC. + r"""Return a callable for the delete server tls policy method over gRPC. - Deletes a single ClientTlsPolicy. + Deletes a single ServerTlsPolicy. Returns: - Callable[[~.DeleteClientTlsPolicyRequest], + Callable[[~.DeleteServerTlsPolicyRequest], ~.Operation]: A function that, when called, will call the underlying RPC on the server. @@ -482,13 +834,899 @@ def delete_client_tls_policy( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "delete_client_tls_policy" not in self._stubs: - self._stubs["delete_client_tls_policy"] = self._logged_channel.unary_unary( - "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteClientTlsPolicy", - request_serializer=client_tls_policy.DeleteClientTlsPolicyRequest.serialize, + if "delete_server_tls_policy" not in self._stubs: + self._stubs["delete_server_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteServerTlsPolicy", + request_serializer=server_tls_policy.DeleteServerTlsPolicyRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["delete_client_tls_policy"] + return self._stubs["delete_server_tls_policy"] + + @property + def list_client_tls_policies( + self, + ) -> Callable[ + [client_tls_policy.ListClientTlsPoliciesRequest], + client_tls_policy.ListClientTlsPoliciesResponse, + ]: + r"""Return a callable for the list client tls policies method over gRPC. + + Lists ClientTlsPolicies in a given project and + location. + + Returns: + Callable[[~.ListClientTlsPoliciesRequest], + ~.ListClientTlsPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_client_tls_policies" not in self._stubs: + self._stubs["list_client_tls_policies"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListClientTlsPolicies", + request_serializer=client_tls_policy.ListClientTlsPoliciesRequest.serialize, + response_deserializer=client_tls_policy.ListClientTlsPoliciesResponse.deserialize, + ) + return self._stubs["list_client_tls_policies"] + + @property + def get_client_tls_policy( + self, + ) -> Callable[ + [client_tls_policy.GetClientTlsPolicyRequest], client_tls_policy.ClientTlsPolicy + ]: + r"""Return a callable for the get client tls policy method over gRPC. + + Gets details of a single ClientTlsPolicy. + + Returns: + Callable[[~.GetClientTlsPolicyRequest], + ~.ClientTlsPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_client_tls_policy" not in self._stubs: + self._stubs["get_client_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetClientTlsPolicy", + request_serializer=client_tls_policy.GetClientTlsPolicyRequest.serialize, + response_deserializer=client_tls_policy.ClientTlsPolicy.deserialize, + ) + return self._stubs["get_client_tls_policy"] + + @property + def create_client_tls_policy( + self, + ) -> Callable[ + [gcn_client_tls_policy.CreateClientTlsPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create client tls policy method over gRPC. + + Creates a new ClientTlsPolicy in a given project and + location. + + Returns: + Callable[[~.CreateClientTlsPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_client_tls_policy" not in self._stubs: + self._stubs["create_client_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateClientTlsPolicy", + request_serializer=gcn_client_tls_policy.CreateClientTlsPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_client_tls_policy"] + + @property + def update_client_tls_policy( + self, + ) -> Callable[ + [gcn_client_tls_policy.UpdateClientTlsPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update client tls policy method over gRPC. + + Updates the parameters of a single ClientTlsPolicy. + + Returns: + Callable[[~.UpdateClientTlsPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_client_tls_policy" not in self._stubs: + self._stubs["update_client_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateClientTlsPolicy", + request_serializer=gcn_client_tls_policy.UpdateClientTlsPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_client_tls_policy"] + + @property + def delete_client_tls_policy( + self, + ) -> Callable[ + [client_tls_policy.DeleteClientTlsPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete client tls policy method over gRPC. + + Deletes a single ClientTlsPolicy. + + Returns: + Callable[[~.DeleteClientTlsPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_client_tls_policy" not in self._stubs: + self._stubs["delete_client_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteClientTlsPolicy", + request_serializer=client_tls_policy.DeleteClientTlsPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_client_tls_policy"] + + @property + def list_gateway_security_policies( + self, + ) -> Callable[ + [gateway_security_policy.ListGatewaySecurityPoliciesRequest], + gateway_security_policy.ListGatewaySecurityPoliciesResponse, + ]: + r"""Return a callable for the list gateway security policies method over gRPC. + + Lists GatewaySecurityPolicies in a given project and + location. + + Returns: + Callable[[~.ListGatewaySecurityPoliciesRequest], + ~.ListGatewaySecurityPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_gateway_security_policies" not in self._stubs: + self._stubs[ + "list_gateway_security_policies" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListGatewaySecurityPolicies", + request_serializer=gateway_security_policy.ListGatewaySecurityPoliciesRequest.serialize, + response_deserializer=gateway_security_policy.ListGatewaySecurityPoliciesResponse.deserialize, + ) + return self._stubs["list_gateway_security_policies"] + + @property + def get_gateway_security_policy( + self, + ) -> Callable[ + [gateway_security_policy.GetGatewaySecurityPolicyRequest], + gateway_security_policy.GatewaySecurityPolicy, + ]: + r"""Return a callable for the get gateway security policy method over gRPC. + + Gets details of a single GatewaySecurityPolicy. + + Returns: + Callable[[~.GetGatewaySecurityPolicyRequest], + ~.GatewaySecurityPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_gateway_security_policy" not in self._stubs: + self._stubs[ + "get_gateway_security_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetGatewaySecurityPolicy", + request_serializer=gateway_security_policy.GetGatewaySecurityPolicyRequest.serialize, + response_deserializer=gateway_security_policy.GatewaySecurityPolicy.deserialize, + ) + return self._stubs["get_gateway_security_policy"] + + @property + def create_gateway_security_policy( + self, + ) -> Callable[ + [gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create gateway security policy method over gRPC. + + Creates a new GatewaySecurityPolicy in a given + project and location. + + Returns: + Callable[[~.CreateGatewaySecurityPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_gateway_security_policy" not in self._stubs: + self._stubs[ + "create_gateway_security_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateGatewaySecurityPolicy", + request_serializer=gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_gateway_security_policy"] + + @property + def update_gateway_security_policy( + self, + ) -> Callable[ + [gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update gateway security policy method over gRPC. + + Updates the parameters of a single + GatewaySecurityPolicy. + + Returns: + Callable[[~.UpdateGatewaySecurityPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_gateway_security_policy" not in self._stubs: + self._stubs[ + "update_gateway_security_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateGatewaySecurityPolicy", + request_serializer=gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_gateway_security_policy"] + + @property + def delete_gateway_security_policy( + self, + ) -> Callable[ + [gateway_security_policy.DeleteGatewaySecurityPolicyRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete gateway security policy method over gRPC. + + Deletes a single GatewaySecurityPolicy. + + Returns: + Callable[[~.DeleteGatewaySecurityPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "delete_gateway_security_policy" not in self._stubs:
+            self._stubs[
+                "delete_gateway_security_policy"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteGatewaySecurityPolicy",
+                request_serializer=gateway_security_policy.DeleteGatewaySecurityPolicyRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["delete_gateway_security_policy"]
+
+    @property
+    def list_gateway_security_policy_rules(
+        self,
+    ) -> Callable[
+        [gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest],
+        gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse,
+    ]:
+        r"""Return a callable for the list gateway security policy
+        rules method over gRPC.
+
+        Lists GatewaySecurityPolicyRules in a given project
+        and location.
+
+        Returns:
+            Callable[[~.ListGatewaySecurityPolicyRulesRequest],
+                    ~.ListGatewaySecurityPolicyRulesResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_gateway_security_policy_rules" not in self._stubs:
+            self._stubs[
+                "list_gateway_security_policy_rules"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListGatewaySecurityPolicyRules",
+                request_serializer=gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest.serialize,
+                response_deserializer=gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse.deserialize,
+            )
+        return self._stubs["list_gateway_security_policy_rules"]
+
+    @property
+    def get_gateway_security_policy_rule(
+        self,
+    ) -> Callable[
+        [gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest],
+        gateway_security_policy_rule.GatewaySecurityPolicyRule,
+    ]:
+        r"""Return a callable for the get gateway security policy
+        rule method over gRPC.
+
+        Gets details of a single GatewaySecurityPolicyRule.
+
+        Returns:
+            Callable[[~.GetGatewaySecurityPolicyRuleRequest],
+                    ~.GatewaySecurityPolicyRule]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_gateway_security_policy_rule" not in self._stubs:
+            self._stubs[
+                "get_gateway_security_policy_rule"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetGatewaySecurityPolicyRule",
+                request_serializer=gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest.serialize,
+                response_deserializer=gateway_security_policy_rule.GatewaySecurityPolicyRule.deserialize,
+            )
+        return self._stubs["get_gateway_security_policy_rule"]
+
+    @property
+    def create_gateway_security_policy_rule(
+        self,
+    ) -> Callable[
+        [gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest],
+        operations_pb2.Operation,
+    ]:
+        r"""Return a callable for the create gateway security policy
+        rule method over gRPC.
+
+        Creates a new GatewaySecurityPolicyRule in a given
+        project and location.
+
+        Returns:
+            Callable[[~.CreateGatewaySecurityPolicyRuleRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_gateway_security_policy_rule" not in self._stubs: + self._stubs[ + "create_gateway_security_policy_rule" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateGatewaySecurityPolicyRule", + request_serializer=gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_gateway_security_policy_rule"] + + @property + def update_gateway_security_policy_rule( + self, + ) -> Callable[ + [gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update gateway security policy + rule method over gRPC. + + Updates the parameters of a single + GatewaySecurityPolicyRule. + + Returns: + Callable[[~.UpdateGatewaySecurityPolicyRuleRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_gateway_security_policy_rule" not in self._stubs: + self._stubs[ + "update_gateway_security_policy_rule" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateGatewaySecurityPolicyRule", + request_serializer=gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_gateway_security_policy_rule"] + + @property + def delete_gateway_security_policy_rule( + self, + ) -> Callable[ + [gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete gateway security policy + rule method over gRPC. + + Deletes a single GatewaySecurityPolicyRule. + + Returns: + Callable[[~.DeleteGatewaySecurityPolicyRuleRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_gateway_security_policy_rule" not in self._stubs: + self._stubs[ + "delete_gateway_security_policy_rule" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteGatewaySecurityPolicyRule", + request_serializer=gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_gateway_security_policy_rule"] + + @property + def list_url_lists( + self, + ) -> Callable[[url_list.ListUrlListsRequest], url_list.ListUrlListsResponse]: + r"""Return a callable for the list url lists method over gRPC. + + Lists UrlLists in a given project and location. + + Returns: + Callable[[~.ListUrlListsRequest], + ~.ListUrlListsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_url_lists" not in self._stubs: + self._stubs["list_url_lists"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListUrlLists", + request_serializer=url_list.ListUrlListsRequest.serialize, + response_deserializer=url_list.ListUrlListsResponse.deserialize, + ) + return self._stubs["list_url_lists"] + + @property + def get_url_list(self) -> Callable[[url_list.GetUrlListRequest], url_list.UrlList]: + r"""Return a callable for the get url list method over gRPC. + + Gets details of a single UrlList. + + Returns: + Callable[[~.GetUrlListRequest], + ~.UrlList]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_url_list" not in self._stubs: + self._stubs["get_url_list"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetUrlList", + request_serializer=url_list.GetUrlListRequest.serialize, + response_deserializer=url_list.UrlList.deserialize, + ) + return self._stubs["get_url_list"] + + @property + def create_url_list( + self, + ) -> Callable[[gcn_url_list.CreateUrlListRequest], operations_pb2.Operation]: + r"""Return a callable for the create url list method over gRPC. + + Creates a new UrlList in a given project and + location. + + Returns: + Callable[[~.CreateUrlListRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_url_list" not in self._stubs: + self._stubs["create_url_list"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateUrlList", + request_serializer=gcn_url_list.CreateUrlListRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_url_list"] + + @property + def update_url_list( + self, + ) -> Callable[[gcn_url_list.UpdateUrlListRequest], operations_pb2.Operation]: + r"""Return a callable for the update url list method over gRPC. + + Updates the parameters of a single UrlList. + + Returns: + Callable[[~.UpdateUrlListRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_url_list" not in self._stubs: + self._stubs["update_url_list"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateUrlList", + request_serializer=gcn_url_list.UpdateUrlListRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_url_list"] + + @property + def delete_url_list( + self, + ) -> Callable[[url_list.DeleteUrlListRequest], operations_pb2.Operation]: + r"""Return a callable for the delete url list method over gRPC. + + Deletes a single UrlList. 
+ + Returns: + Callable[[~.DeleteUrlListRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_url_list" not in self._stubs: + self._stubs["delete_url_list"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteUrlList", + request_serializer=url_list.DeleteUrlListRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_url_list"] + + @property + def list_tls_inspection_policies( + self, + ) -> Callable[ + [tls_inspection_policy.ListTlsInspectionPoliciesRequest], + tls_inspection_policy.ListTlsInspectionPoliciesResponse, + ]: + r"""Return a callable for the list tls inspection policies method over gRPC. + + Lists TlsInspectionPolicies in a given project and + location. + + Returns: + Callable[[~.ListTlsInspectionPoliciesRequest], + ~.ListTlsInspectionPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tls_inspection_policies" not in self._stubs: + self._stubs[ + "list_tls_inspection_policies" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListTlsInspectionPolicies", + request_serializer=tls_inspection_policy.ListTlsInspectionPoliciesRequest.serialize, + response_deserializer=tls_inspection_policy.ListTlsInspectionPoliciesResponse.deserialize, + ) + return self._stubs["list_tls_inspection_policies"] + + @property + def get_tls_inspection_policy( + self, + ) -> Callable[ + [tls_inspection_policy.GetTlsInspectionPolicyRequest], + tls_inspection_policy.TlsInspectionPolicy, + ]: + r"""Return a callable for the get tls inspection policy method over gRPC. + + Gets details of a single TlsInspectionPolicy. + + Returns: + Callable[[~.GetTlsInspectionPolicyRequest], + ~.TlsInspectionPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_tls_inspection_policy" not in self._stubs: + self._stubs["get_tls_inspection_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetTlsInspectionPolicy", + request_serializer=tls_inspection_policy.GetTlsInspectionPolicyRequest.serialize, + response_deserializer=tls_inspection_policy.TlsInspectionPolicy.deserialize, + ) + return self._stubs["get_tls_inspection_policy"] + + @property + def create_tls_inspection_policy( + self, + ) -> Callable[ + [gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create tls inspection policy method over gRPC. + + Creates a new TlsInspectionPolicy in a given project + and location. + + Returns: + Callable[[~.CreateTlsInspectionPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_tls_inspection_policy" not in self._stubs: + self._stubs[ + "create_tls_inspection_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateTlsInspectionPolicy", + request_serializer=gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_tls_inspection_policy"] + + @property + def update_tls_inspection_policy( + self, + ) -> Callable[ + [gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update tls inspection policy method over gRPC. + + Updates the parameters of a single + TlsInspectionPolicy. + + Returns: + Callable[[~.UpdateTlsInspectionPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_tls_inspection_policy" not in self._stubs: + self._stubs[ + "update_tls_inspection_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateTlsInspectionPolicy", + request_serializer=gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_tls_inspection_policy"] + + @property + def delete_tls_inspection_policy( + self, + ) -> Callable[ + [tls_inspection_policy.DeleteTlsInspectionPolicyRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete tls inspection policy method over gRPC. + + Deletes a single TlsInspectionPolicy. + + Returns: + Callable[[~.DeleteTlsInspectionPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_tls_inspection_policy" not in self._stubs: + self._stubs[ + "delete_tls_inspection_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteTlsInspectionPolicy", + request_serializer=tls_inspection_policy.DeleteTlsInspectionPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_tls_inspection_policy"] + + @property + def list_authz_policies( + self, + ) -> Callable[ + [authz_policy.ListAuthzPoliciesRequest], authz_policy.ListAuthzPoliciesResponse + ]: + r"""Return a callable for the list authz policies method over gRPC. + + Lists AuthzPolicies in a given project and location. + + Returns: + Callable[[~.ListAuthzPoliciesRequest], + ~.ListAuthzPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_authz_policies" not in self._stubs: + self._stubs["list_authz_policies"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListAuthzPolicies", + request_serializer=authz_policy.ListAuthzPoliciesRequest.serialize, + response_deserializer=authz_policy.ListAuthzPoliciesResponse.deserialize, + ) + return self._stubs["list_authz_policies"] + + @property + def get_authz_policy( + self, + ) -> Callable[[authz_policy.GetAuthzPolicyRequest], authz_policy.AuthzPolicy]: + r"""Return a callable for the get authz policy method over gRPC. + + Gets details of a single AuthzPolicy. + + Returns: + Callable[[~.GetAuthzPolicyRequest], + ~.AuthzPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_authz_policy" not in self._stubs: + self._stubs["get_authz_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetAuthzPolicy", + request_serializer=authz_policy.GetAuthzPolicyRequest.serialize, + response_deserializer=authz_policy.AuthzPolicy.deserialize, + ) + return self._stubs["get_authz_policy"] + + @property + def create_authz_policy( + self, + ) -> Callable[ + [gcn_authz_policy.CreateAuthzPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create authz policy method over gRPC. + + Creates a new AuthzPolicy in a given project and + location. + + Returns: + Callable[[~.CreateAuthzPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_authz_policy" not in self._stubs: + self._stubs["create_authz_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateAuthzPolicy", + request_serializer=gcn_authz_policy.CreateAuthzPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_authz_policy"] + + @property + def update_authz_policy( + self, + ) -> Callable[ + [gcn_authz_policy.UpdateAuthzPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update authz policy method over gRPC. + + Updates the parameters of a single AuthzPolicy. + + Returns: + Callable[[~.UpdateAuthzPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_authz_policy" not in self._stubs: + self._stubs["update_authz_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateAuthzPolicy", + request_serializer=gcn_authz_policy.UpdateAuthzPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_authz_policy"] + + @property + def delete_authz_policy( + self, + ) -> Callable[[authz_policy.DeleteAuthzPolicyRequest], operations_pb2.Operation]: + r"""Return a callable for the delete authz policy method over gRPC. 
+ + Deletes a single AuthzPolicy. + + Returns: + Callable[[~.DeleteAuthzPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_authz_policy" not in self._stubs: + self._stubs["delete_authz_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteAuthzPolicy", + request_serializer=authz_policy.DeleteAuthzPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_authz_policy"] def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/grpc_asyncio.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/grpc_asyncio.py index cd210032bfd0..19d329546621 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/grpc_asyncio.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/grpc_asyncio.py @@ -35,10 +35,40 @@ from grpc.experimental import aio # type: ignore import proto # type: ignore +from google.cloud.network_security_v1alpha1.types import ( + authorization_policy as gcn_authorization_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + authz_policy as gcn_authz_policy, +) +from google.cloud.network_security_v1alpha1.types import backend_authentication_config +from google.cloud.network_security_v1alpha1.types import ( + backend_authentication_config as gcn_backend_authentication_config, +) from google.cloud.network_security_v1alpha1.types import ( client_tls_policy as gcn_client_tls_policy, ) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy as gcn_gateway_security_policy, +) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy_rule +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy_rule as gcn_gateway_security_policy_rule, +) +from google.cloud.network_security_v1alpha1.types import ( + server_tls_policy as gcn_server_tls_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + tls_inspection_policy as gcn_tls_inspection_policy, +) +from google.cloud.network_security_v1alpha1.types import url_list as gcn_url_list +from google.cloud.network_security_v1alpha1.types import authorization_policy +from google.cloud.network_security_v1alpha1.types import authz_policy from google.cloud.network_security_v1alpha1.types import client_tls_policy +from google.cloud.network_security_v1alpha1.types import server_tls_policy +from google.cloud.network_security_v1alpha1.types import tls_inspection_policy +from google.cloud.network_security_v1alpha1.types import url_list from .base import DEFAULT_CLIENT_INFO, NetworkSecurityTransport from .grpc import NetworkSecurityGrpcTransport @@ -356,20 +386,20 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: return self._operations_client @property - def list_client_tls_policies( + def list_authorization_policies( self, ) -> Callable[ - 
[client_tls_policy.ListClientTlsPoliciesRequest], - Awaitable[client_tls_policy.ListClientTlsPoliciesResponse], + [authorization_policy.ListAuthorizationPoliciesRequest], + Awaitable[authorization_policy.ListAuthorizationPoliciesResponse], ]: - r"""Return a callable for the list client tls policies method over gRPC. + r"""Return a callable for the list authorization policies method over gRPC. - Lists ClientTlsPolicies in a given project and + Lists AuthorizationPolicies in a given project and location. Returns: - Callable[[~.ListClientTlsPoliciesRequest], - Awaitable[~.ListClientTlsPoliciesResponse]]: + Callable[[~.ListAuthorizationPoliciesRequest], + Awaitable[~.ListAuthorizationPoliciesResponse]]: A function that, when called, will call the underlying RPC on the server. """ @@ -377,28 +407,30 @@ def list_client_tls_policies( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "list_client_tls_policies" not in self._stubs: - self._stubs["list_client_tls_policies"] = self._logged_channel.unary_unary( - "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListClientTlsPolicies", - request_serializer=client_tls_policy.ListClientTlsPoliciesRequest.serialize, - response_deserializer=client_tls_policy.ListClientTlsPoliciesResponse.deserialize, + if "list_authorization_policies" not in self._stubs: + self._stubs[ + "list_authorization_policies" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListAuthorizationPolicies", + request_serializer=authorization_policy.ListAuthorizationPoliciesRequest.serialize, + response_deserializer=authorization_policy.ListAuthorizationPoliciesResponse.deserialize, ) - return self._stubs["list_client_tls_policies"] + return self._stubs["list_authorization_policies"] @property - def get_client_tls_policy( + def get_authorization_policy( self, ) -> Callable[ - [client_tls_policy.GetClientTlsPolicyRequest], - Awaitable[client_tls_policy.ClientTlsPolicy], + [authorization_policy.GetAuthorizationPolicyRequest], + Awaitable[authorization_policy.AuthorizationPolicy], ]: - r"""Return a callable for the get client tls policy method over gRPC. + r"""Return a callable for the get authorization policy method over gRPC. - Gets details of a single ClientTlsPolicy. + Gets details of a single AuthorizationPolicy. Returns: - Callable[[~.GetClientTlsPolicyRequest], - Awaitable[~.ClientTlsPolicy]]: + Callable[[~.GetAuthorizationPolicyRequest], + Awaitable[~.AuthorizationPolicy]]: A function that, when called, will call the underlying RPC on the server. """ @@ -406,28 +438,350 @@ def get_client_tls_policy( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "get_client_tls_policy" not in self._stubs: - self._stubs["get_client_tls_policy"] = self._logged_channel.unary_unary( - "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetClientTlsPolicy", - request_serializer=client_tls_policy.GetClientTlsPolicyRequest.serialize, - response_deserializer=client_tls_policy.ClientTlsPolicy.deserialize, + if "get_authorization_policy" not in self._stubs: + self._stubs["get_authorization_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetAuthorizationPolicy", + request_serializer=authorization_policy.GetAuthorizationPolicyRequest.serialize, + response_deserializer=authorization_policy.AuthorizationPolicy.deserialize, ) - return self._stubs["get_client_tls_policy"] + return self._stubs["get_authorization_policy"] @property - def create_client_tls_policy( + def create_authorization_policy( self, ) -> Callable[ - [gcn_client_tls_policy.CreateClientTlsPolicyRequest], + [gcn_authorization_policy.CreateAuthorizationPolicyRequest], Awaitable[operations_pb2.Operation], ]: - r"""Return a callable for the create client tls policy method over gRPC. + r"""Return a callable for the create authorization policy method over gRPC. - Creates a new ClientTlsPolicy in a given project and + Creates a new AuthorizationPolicy in a given project + and location. + + Returns: + Callable[[~.CreateAuthorizationPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_authorization_policy" not in self._stubs: + self._stubs[ + "create_authorization_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateAuthorizationPolicy", + request_serializer=gcn_authorization_policy.CreateAuthorizationPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_authorization_policy"] + + @property + def update_authorization_policy( + self, + ) -> Callable[ + [gcn_authorization_policy.UpdateAuthorizationPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update authorization policy method over gRPC. + + Updates the parameters of a single + AuthorizationPolicy. + + Returns: + Callable[[~.UpdateAuthorizationPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_authorization_policy" not in self._stubs: + self._stubs[ + "update_authorization_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateAuthorizationPolicy", + request_serializer=gcn_authorization_policy.UpdateAuthorizationPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_authorization_policy"] + + @property + def delete_authorization_policy( + self, + ) -> Callable[ + [authorization_policy.DeleteAuthorizationPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete authorization policy method over gRPC. 
+
+        Deletes a single AuthorizationPolicy.
+
+        Returns:
+            Callable[[~.DeleteAuthorizationPolicyRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_authorization_policy" not in self._stubs:
+            self._stubs[
+                "delete_authorization_policy"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteAuthorizationPolicy",
+                request_serializer=authorization_policy.DeleteAuthorizationPolicyRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["delete_authorization_policy"]
+
+    @property
+    def list_backend_authentication_configs(
+        self,
+    ) -> Callable[
+        [backend_authentication_config.ListBackendAuthenticationConfigsRequest],
+        Awaitable[
+            backend_authentication_config.ListBackendAuthenticationConfigsResponse
+        ],
+    ]:
+        r"""Return a callable for the list backend authentication
+        configs method over gRPC.
+
+        Lists BackendAuthenticationConfigs in a given project
+        and location.
+
+        Returns:
+            Callable[[~.ListBackendAuthenticationConfigsRequest],
+                    Awaitable[~.ListBackendAuthenticationConfigsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_backend_authentication_configs" not in self._stubs:
+            self._stubs[
+                "list_backend_authentication_configs"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListBackendAuthenticationConfigs",
+                request_serializer=backend_authentication_config.ListBackendAuthenticationConfigsRequest.serialize,
+                response_deserializer=backend_authentication_config.ListBackendAuthenticationConfigsResponse.deserialize,
+            )
+        return self._stubs["list_backend_authentication_configs"]
+
+    @property
+    def get_backend_authentication_config(
+        self,
+    ) -> Callable[
+        [backend_authentication_config.GetBackendAuthenticationConfigRequest],
+        Awaitable[backend_authentication_config.BackendAuthenticationConfig],
+    ]:
+        r"""Return a callable for the get backend authentication
+        config method over gRPC.
+
+        Gets details of a single BackendAuthenticationConfig.
+
+        Returns:
+            Callable[[~.GetBackendAuthenticationConfigRequest],
+                    Awaitable[~.BackendAuthenticationConfig]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_backend_authentication_config" not in self._stubs:
+            self._stubs[
+                "get_backend_authentication_config"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetBackendAuthenticationConfig",
+                request_serializer=backend_authentication_config.GetBackendAuthenticationConfigRequest.serialize,
+                response_deserializer=backend_authentication_config.BackendAuthenticationConfig.deserialize,
+            )
+        return self._stubs["get_backend_authentication_config"]
+
+    @property
+    def create_backend_authentication_config(
+        self,
+    ) -> Callable[
+        [gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest],
+        Awaitable[operations_pb2.Operation],
+    ]:
+        r"""Return a callable for the create backend authentication
+        config method over gRPC.
+
+        Creates a new BackendAuthenticationConfig in a given
+        project and location.
+
+        Returns:
+            Callable[[~.CreateBackendAuthenticationConfigRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_backend_authentication_config" not in self._stubs:
+            self._stubs[
+                "create_backend_authentication_config"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateBackendAuthenticationConfig",
+                request_serializer=gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["create_backend_authentication_config"]
+
+    @property
+    def update_backend_authentication_config(
+        self,
+    ) -> Callable[
+        [gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest],
+        Awaitable[operations_pb2.Operation],
+    ]:
+        r"""Return a callable for the update backend authentication
+        config method over gRPC.
+
+        Updates the parameters of a single
+        BackendAuthenticationConfig.
+
+        Returns:
+            Callable[[~.UpdateBackendAuthenticationConfigRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_backend_authentication_config" not in self._stubs:
+            self._stubs[
+                "update_backend_authentication_config"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateBackendAuthenticationConfig",
+                request_serializer=gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["update_backend_authentication_config"]
+
+    @property
+    def delete_backend_authentication_config(
+        self,
+    ) -> Callable[
+        [backend_authentication_config.DeleteBackendAuthenticationConfigRequest],
+        Awaitable[operations_pb2.Operation],
+    ]:
+        r"""Return a callable for the delete backend authentication
+        config method over gRPC.
+
+        Deletes a single BackendAuthenticationConfig.
+
+        Returns:
+            Callable[[~.DeleteBackendAuthenticationConfigRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backend_authentication_config" not in self._stubs: + self._stubs[ + "delete_backend_authentication_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteBackendAuthenticationConfig", + request_serializer=backend_authentication_config.DeleteBackendAuthenticationConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backend_authentication_config"] + + @property + def list_server_tls_policies( + self, + ) -> Callable[ + [server_tls_policy.ListServerTlsPoliciesRequest], + Awaitable[server_tls_policy.ListServerTlsPoliciesResponse], + ]: + r"""Return a callable for the list server tls policies method over gRPC. + + Lists ServerTlsPolicies in a given project and location. Returns: - Callable[[~.CreateClientTlsPolicyRequest], + Callable[[~.ListServerTlsPoliciesRequest], + Awaitable[~.ListServerTlsPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_server_tls_policies" not in self._stubs: + self._stubs["list_server_tls_policies"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListServerTlsPolicies", + request_serializer=server_tls_policy.ListServerTlsPoliciesRequest.serialize, + response_deserializer=server_tls_policy.ListServerTlsPoliciesResponse.deserialize, + ) + return self._stubs["list_server_tls_policies"] + + @property + def get_server_tls_policy( + self, + ) -> Callable[ + [server_tls_policy.GetServerTlsPolicyRequest], + Awaitable[server_tls_policy.ServerTlsPolicy], + ]: + r"""Return a callable for the get server tls policy method over gRPC. + + Gets details of a single ServerTlsPolicy. + + Returns: + Callable[[~.GetServerTlsPolicyRequest], + Awaitable[~.ServerTlsPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_server_tls_policy" not in self._stubs: + self._stubs["get_server_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetServerTlsPolicy", + request_serializer=server_tls_policy.GetServerTlsPolicyRequest.serialize, + response_deserializer=server_tls_policy.ServerTlsPolicy.deserialize, + ) + return self._stubs["get_server_tls_policy"] + + @property + def create_server_tls_policy( + self, + ) -> Callable[ + [gcn_server_tls_policy.CreateServerTlsPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create server tls policy method over gRPC. + + Creates a new ServerTlsPolicy in a given project and + location. + + Returns: + Callable[[~.CreateServerTlsPolicyRequest], Awaitable[~.Operation]]: A function that, when called, will call the underlying RPC on the server. @@ -436,27 +790,27 @@ def create_client_tls_policy( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "create_client_tls_policy" not in self._stubs: - self._stubs["create_client_tls_policy"] = self._logged_channel.unary_unary( - "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateClientTlsPolicy", - request_serializer=gcn_client_tls_policy.CreateClientTlsPolicyRequest.serialize, + if "create_server_tls_policy" not in self._stubs: + self._stubs["create_server_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateServerTlsPolicy", + request_serializer=gcn_server_tls_policy.CreateServerTlsPolicyRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["create_client_tls_policy"] + return self._stubs["create_server_tls_policy"] @property - def update_client_tls_policy( + def update_server_tls_policy( self, ) -> Callable[ - [gcn_client_tls_policy.UpdateClientTlsPolicyRequest], + [gcn_server_tls_policy.UpdateServerTlsPolicyRequest], Awaitable[operations_pb2.Operation], ]: - r"""Return a callable for the update client tls policy method over gRPC. + r"""Return a callable for the update server tls policy method over gRPC. - Updates the parameters of a single ClientTlsPolicy. + Updates the parameters of a single ServerTlsPolicy. Returns: - Callable[[~.UpdateClientTlsPolicyRequest], + Callable[[~.UpdateServerTlsPolicyRequest], Awaitable[~.Operation]]: A function that, when called, will call the underlying RPC on the server. @@ -465,27 +819,27 @@ def update_client_tls_policy( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "update_client_tls_policy" not in self._stubs: - self._stubs["update_client_tls_policy"] = self._logged_channel.unary_unary( - "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateClientTlsPolicy", - request_serializer=gcn_client_tls_policy.UpdateClientTlsPolicyRequest.serialize, + if "update_server_tls_policy" not in self._stubs: + self._stubs["update_server_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateServerTlsPolicy", + request_serializer=gcn_server_tls_policy.UpdateServerTlsPolicyRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["update_client_tls_policy"] + return self._stubs["update_server_tls_policy"] @property - def delete_client_tls_policy( + def delete_server_tls_policy( self, ) -> Callable[ - [client_tls_policy.DeleteClientTlsPolicyRequest], + [server_tls_policy.DeleteServerTlsPolicyRequest], Awaitable[operations_pb2.Operation], ]: - r"""Return a callable for the delete client tls policy method over gRPC. + r"""Return a callable for the delete server tls policy method over gRPC. - Deletes a single ClientTlsPolicy. + Deletes a single ServerTlsPolicy. Returns: - Callable[[~.DeleteClientTlsPolicyRequest], + Callable[[~.DeleteServerTlsPolicyRequest], Awaitable[~.Operation]]: A function that, when called, will call the underlying RPC on the server. @@ -494,21 +848,999 @@ def delete_client_tls_policy( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "delete_client_tls_policy" not in self._stubs: - self._stubs["delete_client_tls_policy"] = self._logged_channel.unary_unary( - "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteClientTlsPolicy", - request_serializer=client_tls_policy.DeleteClientTlsPolicyRequest.serialize, + if "delete_server_tls_policy" not in self._stubs: + self._stubs["delete_server_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteServerTlsPolicy", + request_serializer=server_tls_policy.DeleteServerTlsPolicyRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["delete_client_tls_policy"] + return self._stubs["delete_server_tls_policy"] - def _prep_wrapped_messages(self, client_info): - """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_client_tls_policies: self._wrap_method( - self.list_client_tls_policies, - default_timeout=None, - client_info=client_info, + @property + def list_client_tls_policies( + self, + ) -> Callable[ + [client_tls_policy.ListClientTlsPoliciesRequest], + Awaitable[client_tls_policy.ListClientTlsPoliciesResponse], + ]: + r"""Return a callable for the list client tls policies method over gRPC. + + Lists ClientTlsPolicies in a given project and + location. + + Returns: + Callable[[~.ListClientTlsPoliciesRequest], + Awaitable[~.ListClientTlsPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_client_tls_policies" not in self._stubs: + self._stubs["list_client_tls_policies"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListClientTlsPolicies", + request_serializer=client_tls_policy.ListClientTlsPoliciesRequest.serialize, + response_deserializer=client_tls_policy.ListClientTlsPoliciesResponse.deserialize, + ) + return self._stubs["list_client_tls_policies"] + + @property + def get_client_tls_policy( + self, + ) -> Callable[ + [client_tls_policy.GetClientTlsPolicyRequest], + Awaitable[client_tls_policy.ClientTlsPolicy], + ]: + r"""Return a callable for the get client tls policy method over gRPC. + + Gets details of a single ClientTlsPolicy. + + Returns: + Callable[[~.GetClientTlsPolicyRequest], + Awaitable[~.ClientTlsPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_client_tls_policy" not in self._stubs: + self._stubs["get_client_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetClientTlsPolicy", + request_serializer=client_tls_policy.GetClientTlsPolicyRequest.serialize, + response_deserializer=client_tls_policy.ClientTlsPolicy.deserialize, + ) + return self._stubs["get_client_tls_policy"] + + @property + def create_client_tls_policy( + self, + ) -> Callable[ + [gcn_client_tls_policy.CreateClientTlsPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create client tls policy method over gRPC. + + Creates a new ClientTlsPolicy in a given project and + location. 
+ + Returns: + Callable[[~.CreateClientTlsPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_client_tls_policy" not in self._stubs: + self._stubs["create_client_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateClientTlsPolicy", + request_serializer=gcn_client_tls_policy.CreateClientTlsPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_client_tls_policy"] + + @property + def update_client_tls_policy( + self, + ) -> Callable[ + [gcn_client_tls_policy.UpdateClientTlsPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update client tls policy method over gRPC. + + Updates the parameters of a single ClientTlsPolicy. + + Returns: + Callable[[~.UpdateClientTlsPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_client_tls_policy" not in self._stubs: + self._stubs["update_client_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateClientTlsPolicy", + request_serializer=gcn_client_tls_policy.UpdateClientTlsPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_client_tls_policy"] + + @property + def delete_client_tls_policy( + self, + ) -> Callable[ + [client_tls_policy.DeleteClientTlsPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete client tls policy method over gRPC. + + Deletes a single ClientTlsPolicy. + + Returns: + Callable[[~.DeleteClientTlsPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_client_tls_policy" not in self._stubs: + self._stubs["delete_client_tls_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteClientTlsPolicy", + request_serializer=client_tls_policy.DeleteClientTlsPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_client_tls_policy"] + + @property + def list_gateway_security_policies( + self, + ) -> Callable[ + [gateway_security_policy.ListGatewaySecurityPoliciesRequest], + Awaitable[gateway_security_policy.ListGatewaySecurityPoliciesResponse], + ]: + r"""Return a callable for the list gateway security policies method over gRPC. + + Lists GatewaySecurityPolicies in a given project and + location. + + Returns: + Callable[[~.ListGatewaySecurityPoliciesRequest], + Awaitable[~.ListGatewaySecurityPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_gateway_security_policies" not in self._stubs: + self._stubs[ + "list_gateway_security_policies" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListGatewaySecurityPolicies", + request_serializer=gateway_security_policy.ListGatewaySecurityPoliciesRequest.serialize, + response_deserializer=gateway_security_policy.ListGatewaySecurityPoliciesResponse.deserialize, + ) + return self._stubs["list_gateway_security_policies"] + + @property + def get_gateway_security_policy( + self, + ) -> Callable[ + [gateway_security_policy.GetGatewaySecurityPolicyRequest], + Awaitable[gateway_security_policy.GatewaySecurityPolicy], + ]: + r"""Return a callable for the get gateway security policy method over gRPC. + + Gets details of a single GatewaySecurityPolicy. + + Returns: + Callable[[~.GetGatewaySecurityPolicyRequest], + Awaitable[~.GatewaySecurityPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_gateway_security_policy" not in self._stubs: + self._stubs[ + "get_gateway_security_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetGatewaySecurityPolicy", + request_serializer=gateway_security_policy.GetGatewaySecurityPolicyRequest.serialize, + response_deserializer=gateway_security_policy.GatewaySecurityPolicy.deserialize, + ) + return self._stubs["get_gateway_security_policy"] + + @property + def create_gateway_security_policy( + self, + ) -> Callable[ + [gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create gateway security policy method over gRPC. + + Creates a new GatewaySecurityPolicy in a given + project and location. + + Returns: + Callable[[~.CreateGatewaySecurityPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_gateway_security_policy" not in self._stubs: + self._stubs[ + "create_gateway_security_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateGatewaySecurityPolicy", + request_serializer=gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_gateway_security_policy"] + + @property + def update_gateway_security_policy( + self, + ) -> Callable[ + [gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update gateway security policy method over gRPC. + + Updates the parameters of a single + GatewaySecurityPolicy. + + Returns: + Callable[[~.UpdateGatewaySecurityPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_gateway_security_policy" not in self._stubs: + self._stubs[ + "update_gateway_security_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateGatewaySecurityPolicy", + request_serializer=gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_gateway_security_policy"] + + @property + def delete_gateway_security_policy( + self, + ) -> Callable[ + [gateway_security_policy.DeleteGatewaySecurityPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete gateway security policy method over gRPC. + + Deletes a single GatewaySecurityPolicy. + + Returns: + Callable[[~.DeleteGatewaySecurityPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_gateway_security_policy" not in self._stubs: + self._stubs[ + "delete_gateway_security_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteGatewaySecurityPolicy", + request_serializer=gateway_security_policy.DeleteGatewaySecurityPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_gateway_security_policy"] + + @property + def list_gateway_security_policy_rules( + self, + ) -> Callable[ + [gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest], + Awaitable[gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse], + ]: + r"""Return a callable for the list gateway security policy + rules method over gRPC. + + Lists GatewaySecurityPolicyRules in a given project + and location. + + Returns: + Callable[[~.ListGatewaySecurityPolicyRulesRequest], + Awaitable[~.ListGatewaySecurityPolicyRulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_gateway_security_policy_rules" not in self._stubs: + self._stubs[ + "list_gateway_security_policy_rules" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListGatewaySecurityPolicyRules", + request_serializer=gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest.serialize, + response_deserializer=gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse.deserialize, + ) + return self._stubs["list_gateway_security_policy_rules"] + + @property + def get_gateway_security_policy_rule( + self, + ) -> Callable[ + [gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest], + Awaitable[gateway_security_policy_rule.GatewaySecurityPolicyRule], + ]: + r"""Return a callable for the get gateway security policy + rule method over gRPC. + + Gets details of a single GatewaySecurityPolicyRule. + + Returns: + Callable[[~.GetGatewaySecurityPolicyRuleRequest], + Awaitable[~.GatewaySecurityPolicyRule]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_gateway_security_policy_rule" not in self._stubs: + self._stubs[ + "get_gateway_security_policy_rule" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetGatewaySecurityPolicyRule", + request_serializer=gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest.serialize, + response_deserializer=gateway_security_policy_rule.GatewaySecurityPolicyRule.deserialize, + ) + return self._stubs["get_gateway_security_policy_rule"] + + @property + def create_gateway_security_policy_rule( + self, + ) -> Callable[ + [gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create gateway security policy + rule method over gRPC. + + Creates a new GatewaySecurityPolicy in a given + project and location. + + Returns: + Callable[[~.CreateGatewaySecurityPolicyRuleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_gateway_security_policy_rule" not in self._stubs: + self._stubs[ + "create_gateway_security_policy_rule" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateGatewaySecurityPolicyRule", + request_serializer=gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_gateway_security_policy_rule"] + + @property + def update_gateway_security_policy_rule( + self, + ) -> Callable[ + [gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update gateway security policy + rule method over gRPC. + + Updates the parameters of a single + GatewaySecurityPolicyRule. + + Returns: + Callable[[~.UpdateGatewaySecurityPolicyRuleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_gateway_security_policy_rule" not in self._stubs: + self._stubs[ + "update_gateway_security_policy_rule" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateGatewaySecurityPolicyRule", + request_serializer=gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_gateway_security_policy_rule"] + + @property + def delete_gateway_security_policy_rule( + self, + ) -> Callable[ + [gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete gateway security policy + rule method over gRPC. + + Deletes a single GatewaySecurityPolicyRule. 
+ + Returns: + Callable[[~.DeleteGatewaySecurityPolicyRuleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_gateway_security_policy_rule" not in self._stubs: + self._stubs[ + "delete_gateway_security_policy_rule" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteGatewaySecurityPolicyRule", + request_serializer=gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_gateway_security_policy_rule"] + + @property + def list_url_lists( + self, + ) -> Callable[ + [url_list.ListUrlListsRequest], Awaitable[url_list.ListUrlListsResponse] + ]: + r"""Return a callable for the list url lists method over gRPC. + + Lists UrlLists in a given project and location. + + Returns: + Callable[[~.ListUrlListsRequest], + Awaitable[~.ListUrlListsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_url_lists" not in self._stubs: + self._stubs["list_url_lists"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListUrlLists", + request_serializer=url_list.ListUrlListsRequest.serialize, + response_deserializer=url_list.ListUrlListsResponse.deserialize, + ) + return self._stubs["list_url_lists"] + + @property + def get_url_list( + self, + ) -> Callable[[url_list.GetUrlListRequest], Awaitable[url_list.UrlList]]: + r"""Return a callable for the get url list method over gRPC. + + Gets details of a single UrlList. + + Returns: + Callable[[~.GetUrlListRequest], + Awaitable[~.UrlList]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_url_list" not in self._stubs: + self._stubs["get_url_list"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetUrlList", + request_serializer=url_list.GetUrlListRequest.serialize, + response_deserializer=url_list.UrlList.deserialize, + ) + return self._stubs["get_url_list"] + + @property + def create_url_list( + self, + ) -> Callable[ + [gcn_url_list.CreateUrlListRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create url list method over gRPC. + + Creates a new UrlList in a given project and + location. + + Returns: + Callable[[~.CreateUrlListRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_url_list" not in self._stubs: + self._stubs["create_url_list"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateUrlList", + request_serializer=gcn_url_list.CreateUrlListRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_url_list"] + + @property + def update_url_list( + self, + ) -> Callable[ + [gcn_url_list.UpdateUrlListRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update url list method over gRPC. + + Updates the parameters of a single UrlList. + + Returns: + Callable[[~.UpdateUrlListRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_url_list" not in self._stubs: + self._stubs["update_url_list"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateUrlList", + request_serializer=gcn_url_list.UpdateUrlListRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_url_list"] + + @property + def delete_url_list( + self, + ) -> Callable[[url_list.DeleteUrlListRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete url list method over gRPC. + + Deletes a single UrlList. + + Returns: + Callable[[~.DeleteUrlListRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_url_list" not in self._stubs: + self._stubs["delete_url_list"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteUrlList", + request_serializer=url_list.DeleteUrlListRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_url_list"] + + @property + def list_tls_inspection_policies( + self, + ) -> Callable[ + [tls_inspection_policy.ListTlsInspectionPoliciesRequest], + Awaitable[tls_inspection_policy.ListTlsInspectionPoliciesResponse], + ]: + r"""Return a callable for the list tls inspection policies method over gRPC. + + Lists TlsInspectionPolicies in a given project and + location. + + Returns: + Callable[[~.ListTlsInspectionPoliciesRequest], + Awaitable[~.ListTlsInspectionPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_tls_inspection_policies" not in self._stubs: + self._stubs[ + "list_tls_inspection_policies" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListTlsInspectionPolicies", + request_serializer=tls_inspection_policy.ListTlsInspectionPoliciesRequest.serialize, + response_deserializer=tls_inspection_policy.ListTlsInspectionPoliciesResponse.deserialize, + ) + return self._stubs["list_tls_inspection_policies"] + + @property + def get_tls_inspection_policy( + self, + ) -> Callable[ + [tls_inspection_policy.GetTlsInspectionPolicyRequest], + Awaitable[tls_inspection_policy.TlsInspectionPolicy], + ]: + r"""Return a callable for the get tls inspection policy method over gRPC. + + Gets details of a single TlsInspectionPolicy. + + Returns: + Callable[[~.GetTlsInspectionPolicyRequest], + Awaitable[~.TlsInspectionPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_tls_inspection_policy" not in self._stubs: + self._stubs["get_tls_inspection_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetTlsInspectionPolicy", + request_serializer=tls_inspection_policy.GetTlsInspectionPolicyRequest.serialize, + response_deserializer=tls_inspection_policy.TlsInspectionPolicy.deserialize, + ) + return self._stubs["get_tls_inspection_policy"] + + @property + def create_tls_inspection_policy( + self, + ) -> Callable[ + [gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create tls inspection policy method over gRPC. + + Creates a new TlsInspectionPolicy in a given project + and location. + + Returns: + Callable[[~.CreateTlsInspectionPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_tls_inspection_policy" not in self._stubs: + self._stubs[ + "create_tls_inspection_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateTlsInspectionPolicy", + request_serializer=gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_tls_inspection_policy"] + + @property + def update_tls_inspection_policy( + self, + ) -> Callable[ + [gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update tls inspection policy method over gRPC. + + Updates the parameters of a single + TlsInspectionPolicy. + + Returns: + Callable[[~.UpdateTlsInspectionPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_tls_inspection_policy" not in self._stubs: + self._stubs[ + "update_tls_inspection_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateTlsInspectionPolicy", + request_serializer=gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_tls_inspection_policy"] + + @property + def delete_tls_inspection_policy( + self, + ) -> Callable[ + [tls_inspection_policy.DeleteTlsInspectionPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete tls inspection policy method over gRPC. + + Deletes a single TlsInspectionPolicy. + + Returns: + Callable[[~.DeleteTlsInspectionPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_tls_inspection_policy" not in self._stubs: + self._stubs[ + "delete_tls_inspection_policy" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteTlsInspectionPolicy", + request_serializer=tls_inspection_policy.DeleteTlsInspectionPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_tls_inspection_policy"] + + @property + def list_authz_policies( + self, + ) -> Callable[ + [authz_policy.ListAuthzPoliciesRequest], + Awaitable[authz_policy.ListAuthzPoliciesResponse], + ]: + r"""Return a callable for the list authz policies method over gRPC. + + Lists AuthzPolicies in a given project and location. + + Returns: + Callable[[~.ListAuthzPoliciesRequest], + Awaitable[~.ListAuthzPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_authz_policies" not in self._stubs: + self._stubs["list_authz_policies"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/ListAuthzPolicies", + request_serializer=authz_policy.ListAuthzPoliciesRequest.serialize, + response_deserializer=authz_policy.ListAuthzPoliciesResponse.deserialize, + ) + return self._stubs["list_authz_policies"] + + @property + def get_authz_policy( + self, + ) -> Callable[ + [authz_policy.GetAuthzPolicyRequest], Awaitable[authz_policy.AuthzPolicy] + ]: + r"""Return a callable for the get authz policy method over gRPC. + + Gets details of a single AuthzPolicy. + + Returns: + Callable[[~.GetAuthzPolicyRequest], + Awaitable[~.AuthzPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_authz_policy" not in self._stubs: + self._stubs["get_authz_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/GetAuthzPolicy", + request_serializer=authz_policy.GetAuthzPolicyRequest.serialize, + response_deserializer=authz_policy.AuthzPolicy.deserialize, + ) + return self._stubs["get_authz_policy"] + + @property + def create_authz_policy( + self, + ) -> Callable[ + [gcn_authz_policy.CreateAuthzPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create authz policy method over gRPC. + + Creates a new AuthzPolicy in a given project and + location. + + Returns: + Callable[[~.CreateAuthzPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_authz_policy" not in self._stubs: + self._stubs["create_authz_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/CreateAuthzPolicy", + request_serializer=gcn_authz_policy.CreateAuthzPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_authz_policy"] + + @property + def update_authz_policy( + self, + ) -> Callable[ + [gcn_authz_policy.UpdateAuthzPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update authz policy method over gRPC. + + Updates the parameters of a single AuthzPolicy. + + Returns: + Callable[[~.UpdateAuthzPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_authz_policy" not in self._stubs: + self._stubs["update_authz_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/UpdateAuthzPolicy", + request_serializer=gcn_authz_policy.UpdateAuthzPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_authz_policy"] + + @property + def delete_authz_policy( + self, + ) -> Callable[ + [authz_policy.DeleteAuthzPolicyRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete authz policy method over gRPC. + + Deletes a single AuthzPolicy. + + Returns: + Callable[[~.DeleteAuthzPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_authz_policy" not in self._stubs: + self._stubs["delete_authz_policy"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.NetworkSecurity/DeleteAuthzPolicy", + request_serializer=authz_policy.DeleteAuthzPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_authz_policy"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_authorization_policies: self._wrap_method( + self.list_authorization_policies, + default_timeout=None, + client_info=client_info, + ), + self.get_authorization_policy: self._wrap_method( + self.get_authorization_policy, + default_timeout=None, + client_info=client_info, + ), + self.create_authorization_policy: self._wrap_method( + self.create_authorization_policy, + default_timeout=None, + client_info=client_info, + ), + self.update_authorization_policy: self._wrap_method( + self.update_authorization_policy, + default_timeout=None, + client_info=client_info, + ), + self.delete_authorization_policy: self._wrap_method( + self.delete_authorization_policy, + default_timeout=None, + client_info=client_info, + ), + self.list_backend_authentication_configs: self._wrap_method( + self.list_backend_authentication_configs, + default_timeout=None, + client_info=client_info, + ), + self.get_backend_authentication_config: self._wrap_method( + self.get_backend_authentication_config, + default_timeout=None, + client_info=client_info, + ), + self.create_backend_authentication_config: self._wrap_method( + self.create_backend_authentication_config, + default_timeout=None, + client_info=client_info, + ), + self.update_backend_authentication_config: self._wrap_method( + self.update_backend_authentication_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_backend_authentication_config: self._wrap_method( + self.delete_backend_authentication_config, + default_timeout=None, + client_info=client_info, + ), + self.list_server_tls_policies: self._wrap_method( + self.list_server_tls_policies, + default_timeout=None, + client_info=client_info, + ), + self.get_server_tls_policy: self._wrap_method( + self.get_server_tls_policy, + default_timeout=None, + client_info=client_info, + ), + self.create_server_tls_policy: self._wrap_method( + self.create_server_tls_policy, + default_timeout=None, + client_info=client_info, + ), + self.update_server_tls_policy: self._wrap_method( + self.update_server_tls_policy, + default_timeout=None, + client_info=client_info, + ), + self.delete_server_tls_policy: self._wrap_method( + self.delete_server_tls_policy, + default_timeout=None, + client_info=client_info, + ), + self.list_client_tls_policies: self._wrap_method( + self.list_client_tls_policies, + default_timeout=None, + client_info=client_info, ), self.get_client_tls_policy: self._wrap_method( self.get_client_tls_policy, @@ -530,6 +1862,131 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_gateway_security_policies: self._wrap_method( + self.list_gateway_security_policies, + default_timeout=None, + client_info=client_info, + ), + self.get_gateway_security_policy: self._wrap_method( + self.get_gateway_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.create_gateway_security_policy: self._wrap_method( + self.create_gateway_security_policy, + default_timeout=None, + 
client_info=client_info, + ), + self.update_gateway_security_policy: self._wrap_method( + self.update_gateway_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.delete_gateway_security_policy: self._wrap_method( + self.delete_gateway_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.list_gateway_security_policy_rules: self._wrap_method( + self.list_gateway_security_policy_rules, + default_timeout=None, + client_info=client_info, + ), + self.get_gateway_security_policy_rule: self._wrap_method( + self.get_gateway_security_policy_rule, + default_timeout=None, + client_info=client_info, + ), + self.create_gateway_security_policy_rule: self._wrap_method( + self.create_gateway_security_policy_rule, + default_timeout=None, + client_info=client_info, + ), + self.update_gateway_security_policy_rule: self._wrap_method( + self.update_gateway_security_policy_rule, + default_timeout=None, + client_info=client_info, + ), + self.delete_gateway_security_policy_rule: self._wrap_method( + self.delete_gateway_security_policy_rule, + default_timeout=None, + client_info=client_info, + ), + self.list_url_lists: self._wrap_method( + self.list_url_lists, + default_timeout=None, + client_info=client_info, + ), + self.get_url_list: self._wrap_method( + self.get_url_list, + default_timeout=None, + client_info=client_info, + ), + self.create_url_list: self._wrap_method( + self.create_url_list, + default_timeout=None, + client_info=client_info, + ), + self.update_url_list: self._wrap_method( + self.update_url_list, + default_timeout=None, + client_info=client_info, + ), + self.delete_url_list: self._wrap_method( + self.delete_url_list, + default_timeout=None, + client_info=client_info, + ), + self.list_tls_inspection_policies: self._wrap_method( + self.list_tls_inspection_policies, + default_timeout=None, + client_info=client_info, + ), + self.get_tls_inspection_policy: self._wrap_method( + self.get_tls_inspection_policy, + default_timeout=None, + client_info=client_info, + ), + self.create_tls_inspection_policy: self._wrap_method( + self.create_tls_inspection_policy, + default_timeout=None, + client_info=client_info, + ), + self.update_tls_inspection_policy: self._wrap_method( + self.update_tls_inspection_policy, + default_timeout=None, + client_info=client_info, + ), + self.delete_tls_inspection_policy: self._wrap_method( + self.delete_tls_inspection_policy, + default_timeout=None, + client_info=client_info, + ), + self.list_authz_policies: self._wrap_method( + self.list_authz_policies, + default_timeout=None, + client_info=client_info, + ), + self.get_authz_policy: self._wrap_method( + self.get_authz_policy, + default_timeout=None, + client_info=client_info, + ), + self.create_authz_policy: self._wrap_method( + self.create_authz_policy, + default_timeout=None, + client_info=client_info, + ), + self.update_authz_policy: self._wrap_method( + self.update_authz_policy, + default_timeout=None, + client_info=client_info, + ), + self.delete_authz_policy: self._wrap_method( + self.delete_authz_policy, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/rest.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/rest.py index fd0736b3a7ae..cc7806d9d748 100644 --- 
a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/rest.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/rest.py @@ -32,10 +32,40 @@ from google.protobuf import json_format from requests import __version__ as requests_version +from google.cloud.network_security_v1alpha1.types import ( + authorization_policy as gcn_authorization_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + authz_policy as gcn_authz_policy, +) +from google.cloud.network_security_v1alpha1.types import backend_authentication_config +from google.cloud.network_security_v1alpha1.types import ( + backend_authentication_config as gcn_backend_authentication_config, +) from google.cloud.network_security_v1alpha1.types import ( client_tls_policy as gcn_client_tls_policy, ) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy as gcn_gateway_security_policy, +) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy_rule +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy_rule as gcn_gateway_security_policy_rule, +) +from google.cloud.network_security_v1alpha1.types import ( + server_tls_policy as gcn_server_tls_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + tls_inspection_policy as gcn_tls_inspection_policy, +) +from google.cloud.network_security_v1alpha1.types import url_list as gcn_url_list +from google.cloud.network_security_v1alpha1.types import authorization_policy +from google.cloud.network_security_v1alpha1.types import authz_policy from google.cloud.network_security_v1alpha1.types import client_tls_policy +from google.cloud.network_security_v1alpha1.types import server_tls_policy +from google.cloud.network_security_v1alpha1.types import tls_inspection_policy +from google.cloud.network_security_v1alpha1.types import url_list from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .rest_base import _BaseNetworkSecurityRestTransport @@ -79,6 +109,30 @@ class NetworkSecurityRestInterceptor: .. 
code-block:: python class MyCustomNetworkSecurityInterceptor(NetworkSecurityRestInterceptor): + def pre_create_authorization_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_authorization_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_authz_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_authz_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backend_authentication_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backend_authentication_config(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_client_tls_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -87,679 +141,9223 @@ def post_create_client_tls_policy(self, response): logging.log(f"Received response: {response}") return response - def pre_delete_client_tls_policy(self, request, metadata): + def pre_create_gateway_security_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_delete_client_tls_policy(self, response): + def post_create_gateway_security_policy(self, response): logging.log(f"Received response: {response}") return response - def pre_get_client_tls_policy(self, request, metadata): + def pre_create_gateway_security_policy_rule(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_client_tls_policy(self, response): + def post_create_gateway_security_policy_rule(self, response): logging.log(f"Received response: {response}") return response - def pre_list_client_tls_policies(self, request, metadata): + def pre_create_server_tls_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_client_tls_policies(self, response): + def post_create_server_tls_policy(self, response): logging.log(f"Received response: {response}") return response - def pre_update_client_tls_policy(self, request, metadata): + def pre_create_tls_inspection_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_client_tls_policy(self, response): + def post_create_tls_inspection_policy(self, response): logging.log(f"Received response: {response}") return response - transport = NetworkSecurityRestTransport(interceptor=MyCustomNetworkSecurityInterceptor()) - client = NetworkSecurityClient(transport=transport) + def pre_create_url_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def post_create_url_list(self, response): + logging.log(f"Received response: {response}") + return response - """ + def pre_delete_authorization_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - def pre_create_client_tls_policy( - self, - request: gcn_client_tls_policy.CreateClientTlsPolicyRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - gcn_client_tls_policy.CreateClientTlsPolicyRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for create_client_tls_policy + def post_delete_authorization_policy(self, response): + 
logging.log(f"Received response: {response}") + return response - Override in a subclass to manipulate the request or metadata - before they are sent to the NetworkSecurity server. - """ - return request, metadata + def pre_delete_authz_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - def post_create_client_tls_policy( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_client_tls_policy + def post_delete_authz_policy(self, response): + logging.log(f"Received response: {response}") + return response - DEPRECATED. Please use the `post_create_client_tls_policy_with_metadata` - interceptor instead. + def pre_delete_backend_authentication_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - Override in a subclass to read or manipulate the response - after it is returned by the NetworkSecurity server but before - it is returned to user code. This `post_create_client_tls_policy` interceptor runs - before the `post_create_client_tls_policy_with_metadata` interceptor. - """ - return response + def post_delete_backend_authentication_config(self, response): + logging.log(f"Received response: {response}") + return response - def post_create_client_tls_policy_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_client_tls_policy + def pre_delete_client_tls_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - Override in a subclass to read or manipulate the response or metadata after it - is returned by the NetworkSecurity server but before it is returned to user code. + def post_delete_client_tls_policy(self, response): + logging.log(f"Received response: {response}") + return response - We recommend only using this `post_create_client_tls_policy_with_metadata` - interceptor in new development instead of the `post_create_client_tls_policy` interceptor. - When both interceptors are used, this `post_create_client_tls_policy_with_metadata` interceptor runs after the - `post_create_client_tls_policy` interceptor. The (possibly modified) response returned by - `post_create_client_tls_policy` will be passed to - `post_create_client_tls_policy_with_metadata`. - """ - return response, metadata + def pre_delete_gateway_security_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - def pre_delete_client_tls_policy( - self, - request: client_tls_policy.DeleteClientTlsPolicyRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - client_tls_policy.DeleteClientTlsPolicyRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for delete_client_tls_policy + def post_delete_gateway_security_policy(self, response): + logging.log(f"Received response: {response}") + return response - Override in a subclass to manipulate the request or metadata - before they are sent to the NetworkSecurity server. 
- """ - return request, metadata + def pre_delete_gateway_security_policy_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - def post_delete_client_tls_policy( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_client_tls_policy + def post_delete_gateway_security_policy_rule(self, response): + logging.log(f"Received response: {response}") + return response - DEPRECATED. Please use the `post_delete_client_tls_policy_with_metadata` - interceptor instead. + def pre_delete_server_tls_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - Override in a subclass to read or manipulate the response - after it is returned by the NetworkSecurity server but before - it is returned to user code. This `post_delete_client_tls_policy` interceptor runs - before the `post_delete_client_tls_policy_with_metadata` interceptor. - """ - return response + def post_delete_server_tls_policy(self, response): + logging.log(f"Received response: {response}") + return response - def post_delete_client_tls_policy_with_metadata( - self, - response: operations_pb2.Operation, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_client_tls_policy + def pre_delete_tls_inspection_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - Override in a subclass to read or manipulate the response or metadata after it - is returned by the NetworkSecurity server but before it is returned to user code. + def post_delete_tls_inspection_policy(self, response): + logging.log(f"Received response: {response}") + return response - We recommend only using this `post_delete_client_tls_policy_with_metadata` - interceptor in new development instead of the `post_delete_client_tls_policy` interceptor. - When both interceptors are used, this `post_delete_client_tls_policy_with_metadata` interceptor runs after the - `post_delete_client_tls_policy` interceptor. The (possibly modified) response returned by - `post_delete_client_tls_policy` will be passed to - `post_delete_client_tls_policy_with_metadata`. - """ - return response, metadata + def pre_delete_url_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - def pre_get_client_tls_policy( - self, - request: client_tls_policy.GetClientTlsPolicyRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - client_tls_policy.GetClientTlsPolicyRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for get_client_tls_policy + def post_delete_url_list(self, response): + logging.log(f"Received response: {response}") + return response - Override in a subclass to manipulate the request or metadata - before they are sent to the NetworkSecurity server. - """ - return request, metadata + def pre_get_authorization_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - def post_get_client_tls_policy( - self, response: client_tls_policy.ClientTlsPolicy - ) -> client_tls_policy.ClientTlsPolicy: - """Post-rpc interceptor for get_client_tls_policy + def post_get_authorization_policy(self, response): + logging.log(f"Received response: {response}") + return response - DEPRECATED. 
Please use the `post_get_client_tls_policy_with_metadata` - interceptor instead. + def pre_get_authz_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - Override in a subclass to read or manipulate the response - after it is returned by the NetworkSecurity server but before - it is returned to user code. This `post_get_client_tls_policy` interceptor runs - before the `post_get_client_tls_policy_with_metadata` interceptor. - """ - return response + def post_get_authz_policy(self, response): + logging.log(f"Received response: {response}") + return response - def post_get_client_tls_policy_with_metadata( - self, - response: client_tls_policy.ClientTlsPolicy, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - client_tls_policy.ClientTlsPolicy, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for get_client_tls_policy + def pre_get_backend_authentication_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - Override in a subclass to read or manipulate the response or metadata after it - is returned by the NetworkSecurity server but before it is returned to user code. + def post_get_backend_authentication_config(self, response): + logging.log(f"Received response: {response}") + return response - We recommend only using this `post_get_client_tls_policy_with_metadata` - interceptor in new development instead of the `post_get_client_tls_policy` interceptor. - When both interceptors are used, this `post_get_client_tls_policy_with_metadata` interceptor runs after the - `post_get_client_tls_policy` interceptor. The (possibly modified) response returned by - `post_get_client_tls_policy` will be passed to - `post_get_client_tls_policy_with_metadata`. - """ - return response, metadata + def pre_get_client_tls_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - def pre_list_client_tls_policies( - self, - request: client_tls_policy.ListClientTlsPoliciesRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - client_tls_policy.ListClientTlsPoliciesRequest, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Pre-rpc interceptor for list_client_tls_policies + def post_get_client_tls_policy(self, response): + logging.log(f"Received response: {response}") + return response - Override in a subclass to manipulate the request or metadata - before they are sent to the NetworkSecurity server. - """ - return request, metadata + def pre_get_gateway_security_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - def post_list_client_tls_policies( - self, response: client_tls_policy.ListClientTlsPoliciesResponse - ) -> client_tls_policy.ListClientTlsPoliciesResponse: - """Post-rpc interceptor for list_client_tls_policies + def post_get_gateway_security_policy(self, response): + logging.log(f"Received response: {response}") + return response - DEPRECATED. Please use the `post_list_client_tls_policies_with_metadata` - interceptor instead. + def pre_get_gateway_security_policy_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - Override in a subclass to read or manipulate the response - after it is returned by the NetworkSecurity server but before - it is returned to user code. 
This `post_list_client_tls_policies` interceptor runs - before the `post_list_client_tls_policies_with_metadata` interceptor. - """ - return response + def post_get_gateway_security_policy_rule(self, response): + logging.log(f"Received response: {response}") + return response - def post_list_client_tls_policies_with_metadata( - self, - response: client_tls_policy.ListClientTlsPoliciesResponse, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - client_tls_policy.ListClientTlsPoliciesResponse, - Sequence[Tuple[str, Union[str, bytes]]], - ]: - """Post-rpc interceptor for list_client_tls_policies + def pre_get_server_tls_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - Override in a subclass to read or manipulate the response or metadata after it - is returned by the NetworkSecurity server but before it is returned to user code. + def post_get_server_tls_policy(self, response): + logging.log(f"Received response: {response}") + return response - We recommend only using this `post_list_client_tls_policies_with_metadata` - interceptor in new development instead of the `post_list_client_tls_policies` interceptor. - When both interceptors are used, this `post_list_client_tls_policies_with_metadata` interceptor runs after the - `post_list_client_tls_policies` interceptor. The (possibly modified) response returned by - `post_list_client_tls_policies` will be passed to - `post_list_client_tls_policies_with_metadata`. - """ - return response, metadata + def pre_get_tls_inspection_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata - def pre_update_client_tls_policy( + def post_get_tls_inspection_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_url_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_url_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_authorization_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_authorization_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_authz_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_authz_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backend_authentication_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backend_authentication_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_client_tls_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_client_tls_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_gateway_security_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_gateway_security_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_gateway_security_policy_rules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_list_gateway_security_policy_rules(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_server_tls_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_server_tls_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_tls_inspection_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_tls_inspection_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_url_lists(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_url_lists(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_authorization_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_authorization_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_authz_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_authz_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backend_authentication_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backend_authentication_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_client_tls_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_client_tls_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_gateway_security_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_gateway_security_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_gateway_security_policy_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_gateway_security_policy_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_server_tls_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_server_tls_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_tls_inspection_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_tls_inspection_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_url_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_url_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = NetworkSecurityRestTransport(interceptor=MyCustomNetworkSecurityInterceptor()) + client = NetworkSecurityClient(transport=transport) + + + """ + + def pre_create_authorization_policy( self, - request: gcn_client_tls_policy.UpdateClientTlsPolicyRequest, + request: 
gcn_authorization_policy.CreateAuthorizationPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - gcn_client_tls_policy.UpdateClientTlsPolicyRequest, + gcn_authorization_policy.CreateAuthorizationPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]], ]: - """Pre-rpc interceptor for update_client_tls_policy + """Pre-rpc interceptor for create_authorization_policy Override in a subclass to manipulate the request or metadata before they are sent to the NetworkSecurity server. """ return request, metadata - def post_update_client_tls_policy( + def post_create_authorization_policy( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_client_tls_policy + """Post-rpc interceptor for create_authorization_policy - DEPRECATED. Please use the `post_update_client_tls_policy_with_metadata` + DEPRECATED. Please use the `post_create_authorization_policy_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the NetworkSecurity server but before - it is returned to user code. This `post_update_client_tls_policy` interceptor runs - before the `post_update_client_tls_policy_with_metadata` interceptor. + it is returned to user code. This `post_create_authorization_policy` interceptor runs + before the `post_create_authorization_policy_with_metadata` interceptor. """ return response - def post_update_client_tls_policy_with_metadata( + def post_create_authorization_policy_with_metadata( self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_client_tls_policy + """Post-rpc interceptor for create_authorization_policy Override in a subclass to read or manipulate the response or metadata after it is returned by the NetworkSecurity server but before it is returned to user code. - We recommend only using this `post_update_client_tls_policy_with_metadata` - interceptor in new development instead of the `post_update_client_tls_policy` interceptor. - When both interceptors are used, this `post_update_client_tls_policy_with_metadata` interceptor runs after the - `post_update_client_tls_policy` interceptor. The (possibly modified) response returned by - `post_update_client_tls_policy` will be passed to - `post_update_client_tls_policy_with_metadata`. + We recommend only using this `post_create_authorization_policy_with_metadata` + interceptor in new development instead of the `post_create_authorization_policy` interceptor. + When both interceptors are used, this `post_create_authorization_policy_with_metadata` interceptor runs after the + `post_create_authorization_policy` interceptor. The (possibly modified) response returned by + `post_create_authorization_policy` will be passed to + `post_create_authorization_policy_with_metadata`. 
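To make the recommendation above concrete, here is a minimal sketch of the `_with_metadata` style the generated docstrings point new code toward. It follows the `MyCustomNetworkSecurityInterceptor` snippet from the class docstring, but overrides a single hook and uses `logging.info` (the snippet's bare `logging.log(msg)` call would fail, since `logging.log` requires a level as its first argument); the import path is an assumption based on the file layout introduced by this patch.

import logging

# Assumed import path, mirroring the transports/rest.py location in this patch.
from google.cloud.network_security_v1alpha1.services.network_security.transports.rest import (
    NetworkSecurityRestInterceptor,
)


class LoggingInterceptor(NetworkSecurityRestInterceptor):
    def post_create_authorization_policy_with_metadata(self, response, metadata):
        # `response` is the raw long-running operations_pb2.Operation returned by
        # the server; `metadata` is the sequence of (key, value) response metadata.
        logging.info("CreateAuthorizationPolicy started LRO %s", response.name)
        return response, metadata

An instance of the subclass is then passed as `interceptor=` when constructing `NetworkSecurityRestTransport`, exactly as in the docstring example above.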
""" return response, metadata - def pre_get_location( + def pre_create_authz_policy( self, - request: locations_pb2.GetLocationRequest, + request: gcn_authz_policy.CreateAuthzPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + gcn_authz_policy.CreateAuthzPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], ]: - """Pre-rpc interceptor for get_location + """Pre-rpc interceptor for create_authz_policy Override in a subclass to manipulate the request or metadata before they are sent to the NetworkSecurity server. """ return request, metadata - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location + def post_create_authz_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_authz_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_authz_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkSecurity server but before - it is returned to user code. + it is returned to user code. This `post_create_authz_policy` interceptor runs + before the `post_create_authz_policy_with_metadata` interceptor. """ return response - def pre_list_locations( + def post_create_authz_policy_with_metadata( self, - request: locations_pb2.ListLocationsRequest, + response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the NetworkSecurity server. - """ - return request, metadata + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_authz_policy - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. - Override in a subclass to manipulate the response - after it is returned by the NetworkSecurity server but before - it is returned to user code. + We recommend only using this `post_create_authz_policy_with_metadata` + interceptor in new development instead of the `post_create_authz_policy` interceptor. + When both interceptors are used, this `post_create_authz_policy_with_metadata` interceptor runs after the + `post_create_authz_policy` interceptor. The (possibly modified) response returned by + `post_create_authz_policy` will be passed to + `post_create_authz_policy_with_metadata`. 
""" - return response + return response, metadata - def pre_get_iam_policy( + def pre_create_backend_authentication_config( self, - request: iam_policy_pb2.GetIamPolicyRequest, + request: gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, + Sequence[Tuple[str, Union[str, bytes]]], ]: - """Pre-rpc interceptor for get_iam_policy + """Pre-rpc interceptor for create_backend_authentication_config Override in a subclass to manipulate the request or metadata before they are sent to the NetworkSecurity server. """ return request, metadata - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy + def post_create_backend_authentication_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backend_authentication_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backend_authentication_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkSecurity server but before - it is returned to user code. + it is returned to user code. This `post_create_backend_authentication_config` interceptor runs + before the `post_create_backend_authentication_config_with_metadata` interceptor. """ return response - def pre_set_iam_policy( + def post_create_backend_authentication_config_with_metadata( self, - request: iam_policy_pb2.SetIamPolicyRequest, + response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the NetworkSecurity server. - """ - return request, metadata + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backend_authentication_config - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. - Override in a subclass to manipulate the response - after it is returned by the NetworkSecurity server but before - it is returned to user code. + We recommend only using this `post_create_backend_authentication_config_with_metadata` + interceptor in new development instead of the `post_create_backend_authentication_config` interceptor. + When both interceptors are used, this `post_create_backend_authentication_config_with_metadata` interceptor runs after the + `post_create_backend_authentication_config` interceptor. The (possibly modified) response returned by + `post_create_backend_authentication_config` will be passed to + `post_create_backend_authentication_config_with_metadata`. 
""" - return response + return response, metadata - def pre_test_iam_permissions( + def pre_create_client_tls_policy( self, - request: iam_policy_pb2.TestIamPermissionsRequest, + request: gcn_client_tls_policy.CreateClientTlsPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - iam_policy_pb2.TestIamPermissionsRequest, + gcn_client_tls_policy.CreateClientTlsPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]], ]: - """Pre-rpc interceptor for test_iam_permissions + """Pre-rpc interceptor for create_client_tls_policy Override in a subclass to manipulate the request or metadata before they are sent to the NetworkSecurity server. """ return request, metadata - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions + def post_create_client_tls_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_client_tls_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_client_tls_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkSecurity server but before - it is returned to user code. + it is returned to user code. This `post_create_client_tls_policy` interceptor runs + before the `post_create_client_tls_policy_with_metadata` interceptor. """ return response - def pre_cancel_operation( + def post_create_client_tls_policy_with_metadata( self, - request: operations_pb2.CancelOperationRequest, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_client_tls_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_create_client_tls_policy_with_metadata` + interceptor in new development instead of the `post_create_client_tls_policy` interceptor. + When both interceptors are used, this `post_create_client_tls_policy_with_metadata` interceptor runs after the + `post_create_client_tls_policy` interceptor. The (possibly modified) response returned by + `post_create_client_tls_policy` will be passed to + `post_create_client_tls_policy_with_metadata`. + """ + return response, metadata + + def pre_create_gateway_security_policy( + self, + request: gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], ]: - """Pre-rpc interceptor for cancel_operation + """Pre-rpc interceptor for create_gateway_security_policy Override in a subclass to manipulate the request or metadata before they are sent to the NetworkSecurity server. 
""" return request, metadata - def post_cancel_operation(self, response: None) -> None: - """Post-rpc interceptor for cancel_operation + def post_create_gateway_security_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_gateway_security_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_gateway_security_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkSecurity server but before - it is returned to user code. + it is returned to user code. This `post_create_gateway_security_policy` interceptor runs + before the `post_create_gateway_security_policy_with_metadata` interceptor. """ return response - def pre_delete_operation( + def post_create_gateway_security_policy_with_metadata( self, - request: operations_pb2.DeleteOperationRequest, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_gateway_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_create_gateway_security_policy_with_metadata` + interceptor in new development instead of the `post_create_gateway_security_policy` interceptor. + When both interceptors are used, this `post_create_gateway_security_policy_with_metadata` interceptor runs after the + `post_create_gateway_security_policy` interceptor. The (possibly modified) response returned by + `post_create_gateway_security_policy` will be passed to + `post_create_gateway_security_policy_with_metadata`. + """ + return response, metadata + + def pre_create_gateway_security_policy_rule( + self, + request: gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, + Sequence[Tuple[str, Union[str, bytes]]], ]: - """Pre-rpc interceptor for delete_operation + """Pre-rpc interceptor for create_gateway_security_policy_rule Override in a subclass to manipulate the request or metadata before they are sent to the NetworkSecurity server. """ return request, metadata - def post_delete_operation(self, response: None) -> None: - """Post-rpc interceptor for delete_operation + def post_create_gateway_security_policy_rule( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_gateway_security_policy_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_gateway_security_policy_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkSecurity server but before - it is returned to user code. + it is returned to user code. This `post_create_gateway_security_policy_rule` interceptor runs + before the `post_create_gateway_security_policy_rule_with_metadata` interceptor. 
""" return response - def pre_get_operation( + def post_create_gateway_security_policy_rule_with_metadata( self, - request: operations_pb2.GetOperationRequest, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_gateway_security_policy_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_create_gateway_security_policy_rule_with_metadata` + interceptor in new development instead of the `post_create_gateway_security_policy_rule` interceptor. + When both interceptors are used, this `post_create_gateway_security_policy_rule_with_metadata` interceptor runs after the + `post_create_gateway_security_policy_rule` interceptor. The (possibly modified) response returned by + `post_create_gateway_security_policy_rule` will be passed to + `post_create_gateway_security_policy_rule_with_metadata`. + """ + return response, metadata + + def pre_create_server_tls_policy( + self, + request: gcn_server_tls_policy.CreateServerTlsPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + gcn_server_tls_policy.CreateServerTlsPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], ]: - """Pre-rpc interceptor for get_operation + """Pre-rpc interceptor for create_server_tls_policy Override in a subclass to manipulate the request or metadata before they are sent to the NetworkSecurity server. """ return request, metadata - def post_get_operation( + def post_create_server_tls_policy( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation + """Post-rpc interceptor for create_server_tls_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_server_tls_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkSecurity server but before - it is returned to user code. + it is returned to user code. This `post_create_server_tls_policy` interceptor runs + before the `post_create_server_tls_policy_with_metadata` interceptor. """ return response - def pre_list_operations( + def post_create_server_tls_policy_with_metadata( self, - request: operations_pb2.ListOperationsRequest, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_server_tls_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_create_server_tls_policy_with_metadata` + interceptor in new development instead of the `post_create_server_tls_policy` interceptor. + When both interceptors are used, this `post_create_server_tls_policy_with_metadata` interceptor runs after the + `post_create_server_tls_policy` interceptor. The (possibly modified) response returned by + `post_create_server_tls_policy` will be passed to + `post_create_server_tls_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_create_tls_inspection_policy( + self, + request: gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], ]: - """Pre-rpc interceptor for list_operations + """Pre-rpc interceptor for create_tls_inspection_policy Override in a subclass to manipulate the request or metadata before they are sent to the NetworkSecurity server. """ return request, metadata - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations + def post_create_tls_inspection_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_tls_inspection_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_tls_inspection_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkSecurity server but before - it is returned to user code. + it is returned to user code. This `post_create_tls_inspection_policy` interceptor runs + before the `post_create_tls_inspection_policy_with_metadata` interceptor. """ return response + def post_create_tls_inspection_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_tls_inspection_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. -@dataclasses.dataclass -class NetworkSecurityRestStub: - _session: AuthorizedSession - _host: str - _interceptor: NetworkSecurityRestInterceptor + We recommend only using this `post_create_tls_inspection_policy_with_metadata` + interceptor in new development instead of the `post_create_tls_inspection_policy` interceptor. + When both interceptors are used, this `post_create_tls_inspection_policy_with_metadata` interceptor runs after the + `post_create_tls_inspection_policy` interceptor. The (possibly modified) response returned by + `post_create_tls_inspection_policy` will be passed to + `post_create_tls_inspection_policy_with_metadata`. + """ + return response, metadata + def pre_create_url_list( + self, + request: gcn_url_list.CreateUrlListRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_url_list.CreateUrlListRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_url_list -class NetworkSecurityRestTransport(_BaseNetworkSecurityRestTransport): - """REST backend synchronous transport for NetworkSecurity. + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata - Network Security API provides resources to configure - authentication and authorization policies. Refer to per API - resource documentation for more information. 
+ def post_create_url_list( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_url_list - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. + DEPRECATED. Please use the `post_create_url_list_with_metadata` + interceptor instead. - It sends JSON representations of protocol buffers over HTTP/1.1 - """ + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_create_url_list` interceptor runs + before the `post_create_url_list_with_metadata` interceptor. + """ + return response - def __init__( + def post_create_url_list_with_metadata( self, - *, - host: str = "networksecurity.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[NetworkSecurityRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_url_list - Args: - host (Optional[str]): - The hostname to connect to (default: 'networksecurity.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. + We recommend only using this `post_create_url_list_with_metadata` + interceptor in new development instead of the `post_create_url_list` interceptor. 
+ When both interceptors are used, this `post_create_url_list_with_metadata` interceptor runs after the + `post_create_url_list` interceptor. The (possibly modified) response returned by + `post_create_url_list` will be passed to + `post_create_url_list_with_metadata`. """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or NetworkSecurityRestInterceptor() - self._prep_wrapped_messages(client_info) + return response, metadata - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. + def pre_delete_authorization_policy( + self, + request: authorization_policy.DeleteAuthorizationPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + authorization_policy.DeleteAuthorizationPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_authorization_policy - This property caches on the instance; repeated calls return the same - client. + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - "google.longrunning.Operations.CancelOperation": [ - { - "method": "post", - "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", - "body": "*", - }, + return request, metadata + + def post_delete_authorization_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_authorization_policy + + DEPRECATED. Please use the `post_delete_authorization_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_delete_authorization_policy` interceptor runs + before the `post_delete_authorization_policy_with_metadata` interceptor. + """ + return response + + def post_delete_authorization_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_authorization_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_delete_authorization_policy_with_metadata` + interceptor in new development instead of the `post_delete_authorization_policy` interceptor. + When both interceptors are used, this `post_delete_authorization_policy_with_metadata` interceptor runs after the + `post_delete_authorization_policy` interceptor. 
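Pulling the pieces together, the constructor documented in the removed docstring above accepts the interceptor alongside host, credentials, mTLS, and quota options, and the resulting transport is handed to the client just as the class docstring example shows. A sketch under the assumption that the import paths match the v1alpha1 layout in this patch; with no credentials argument, the transport falls back to the environment's default credentials, per the constructor docstring.

from google.cloud.network_security_v1alpha1.services.network_security import (
    NetworkSecurityClient,
)
from google.cloud.network_security_v1alpha1.services.network_security.transports.rest import (
    NetworkSecurityRestInterceptor,
    NetworkSecurityRestTransport,
)


class AuditInterceptor(NetworkSecurityRestInterceptor):
    """Override only the hooks you need; the rest default to pass-throughs."""


transport = NetworkSecurityRestTransport(
    host="networksecurity.googleapis.com",  # the documented default host
    interceptor=AuditInterceptor(),
)
client = NetworkSecurityClient(transport=transport)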
The (possibly modified) response returned by + `post_delete_authorization_policy` will be passed to + `post_delete_authorization_policy_with_metadata`. + """ + return response, metadata + + def pre_delete_authz_policy( + self, + request: authz_policy.DeleteAuthzPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + authz_policy.DeleteAuthzPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_authz_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_delete_authz_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_authz_policy + + DEPRECATED. Please use the `post_delete_authz_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_delete_authz_policy` interceptor runs + before the `post_delete_authz_policy_with_metadata` interceptor. + """ + return response + + def post_delete_authz_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_authz_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_delete_authz_policy_with_metadata` + interceptor in new development instead of the `post_delete_authz_policy` interceptor. + When both interceptors are used, this `post_delete_authz_policy_with_metadata` interceptor runs after the + `post_delete_authz_policy` interceptor. The (possibly modified) response returned by + `post_delete_authz_policy` will be passed to + `post_delete_authz_policy_with_metadata`. + """ + return response, metadata + + def pre_delete_backend_authentication_config( + self, + request: backend_authentication_config.DeleteBackendAuthenticationConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backend_authentication_config.DeleteBackendAuthenticationConfigRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_backend_authentication_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_delete_backend_authentication_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backend_authentication_config + + DEPRECATED. Please use the `post_delete_backend_authentication_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_delete_backend_authentication_config` interceptor runs + before the `post_delete_backend_authentication_config_with_metadata` interceptor. 
+ """ + return response + + def post_delete_backend_authentication_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backend_authentication_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_delete_backend_authentication_config_with_metadata` + interceptor in new development instead of the `post_delete_backend_authentication_config` interceptor. + When both interceptors are used, this `post_delete_backend_authentication_config_with_metadata` interceptor runs after the + `post_delete_backend_authentication_config` interceptor. The (possibly modified) response returned by + `post_delete_backend_authentication_config` will be passed to + `post_delete_backend_authentication_config_with_metadata`. + """ + return response, metadata + + def pre_delete_client_tls_policy( + self, + request: client_tls_policy.DeleteClientTlsPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + client_tls_policy.DeleteClientTlsPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_client_tls_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_delete_client_tls_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_client_tls_policy + + DEPRECATED. Please use the `post_delete_client_tls_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_delete_client_tls_policy` interceptor runs + before the `post_delete_client_tls_policy_with_metadata` interceptor. + """ + return response + + def post_delete_client_tls_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_client_tls_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_delete_client_tls_policy_with_metadata` + interceptor in new development instead of the `post_delete_client_tls_policy` interceptor. + When both interceptors are used, this `post_delete_client_tls_policy_with_metadata` interceptor runs after the + `post_delete_client_tls_policy` interceptor. The (possibly modified) response returned by + `post_delete_client_tls_policy` will be passed to + `post_delete_client_tls_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_gateway_security_policy( + self, + request: gateway_security_policy.DeleteGatewaySecurityPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gateway_security_policy.DeleteGatewaySecurityPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_gateway_security_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_delete_gateway_security_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_gateway_security_policy + + DEPRECATED. Please use the `post_delete_gateway_security_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_delete_gateway_security_policy` interceptor runs + before the `post_delete_gateway_security_policy_with_metadata` interceptor. + """ + return response + + def post_delete_gateway_security_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_gateway_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_delete_gateway_security_policy_with_metadata` + interceptor in new development instead of the `post_delete_gateway_security_policy` interceptor. + When both interceptors are used, this `post_delete_gateway_security_policy_with_metadata` interceptor runs after the + `post_delete_gateway_security_policy` interceptor. The (possibly modified) response returned by + `post_delete_gateway_security_policy` will be passed to + `post_delete_gateway_security_policy_with_metadata`. + """ + return response, metadata + + def pre_delete_gateway_security_policy_rule( + self, + request: gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_gateway_security_policy_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_delete_gateway_security_policy_rule( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_gateway_security_policy_rule + + DEPRECATED. Please use the `post_delete_gateway_security_policy_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_delete_gateway_security_policy_rule` interceptor runs + before the `post_delete_gateway_security_policy_rule_with_metadata` interceptor. 
+ """ + return response + + def post_delete_gateway_security_policy_rule_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_gateway_security_policy_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_delete_gateway_security_policy_rule_with_metadata` + interceptor in new development instead of the `post_delete_gateway_security_policy_rule` interceptor. + When both interceptors are used, this `post_delete_gateway_security_policy_rule_with_metadata` interceptor runs after the + `post_delete_gateway_security_policy_rule` interceptor. The (possibly modified) response returned by + `post_delete_gateway_security_policy_rule` will be passed to + `post_delete_gateway_security_policy_rule_with_metadata`. + """ + return response, metadata + + def pre_delete_server_tls_policy( + self, + request: server_tls_policy.DeleteServerTlsPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + server_tls_policy.DeleteServerTlsPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_server_tls_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_delete_server_tls_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_server_tls_policy + + DEPRECATED. Please use the `post_delete_server_tls_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_delete_server_tls_policy` interceptor runs + before the `post_delete_server_tls_policy_with_metadata` interceptor. + """ + return response + + def post_delete_server_tls_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_server_tls_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_delete_server_tls_policy_with_metadata` + interceptor in new development instead of the `post_delete_server_tls_policy` interceptor. + When both interceptors are used, this `post_delete_server_tls_policy_with_metadata` interceptor runs after the + `post_delete_server_tls_policy` interceptor. The (possibly modified) response returned by + `post_delete_server_tls_policy` will be passed to + `post_delete_server_tls_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_tls_inspection_policy( + self, + request: tls_inspection_policy.DeleteTlsInspectionPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + tls_inspection_policy.DeleteTlsInspectionPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_tls_inspection_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_delete_tls_inspection_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_tls_inspection_policy + + DEPRECATED. Please use the `post_delete_tls_inspection_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_delete_tls_inspection_policy` interceptor runs + before the `post_delete_tls_inspection_policy_with_metadata` interceptor. + """ + return response + + def post_delete_tls_inspection_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_tls_inspection_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_delete_tls_inspection_policy_with_metadata` + interceptor in new development instead of the `post_delete_tls_inspection_policy` interceptor. + When both interceptors are used, this `post_delete_tls_inspection_policy_with_metadata` interceptor runs after the + `post_delete_tls_inspection_policy` interceptor. The (possibly modified) response returned by + `post_delete_tls_inspection_policy` will be passed to + `post_delete_tls_inspection_policy_with_metadata`. + """ + return response, metadata + + def pre_delete_url_list( + self, + request: url_list.DeleteUrlListRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[url_list.DeleteUrlListRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_url_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_delete_url_list( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_url_list + + DEPRECATED. Please use the `post_delete_url_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_delete_url_list` interceptor runs + before the `post_delete_url_list_with_metadata` interceptor. + """ + return response + + def post_delete_url_list_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_url_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. 
+ + We recommend only using this `post_delete_url_list_with_metadata` + interceptor in new development instead of the `post_delete_url_list` interceptor. + When both interceptors are used, this `post_delete_url_list_with_metadata` interceptor runs after the + `post_delete_url_list` interceptor. The (possibly modified) response returned by + `post_delete_url_list` will be passed to + `post_delete_url_list_with_metadata`. + """ + return response, metadata + + def pre_get_authorization_policy( + self, + request: authorization_policy.GetAuthorizationPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + authorization_policy.GetAuthorizationPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_authorization_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_authorization_policy( + self, response: authorization_policy.AuthorizationPolicy + ) -> authorization_policy.AuthorizationPolicy: + """Post-rpc interceptor for get_authorization_policy + + DEPRECATED. Please use the `post_get_authorization_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_get_authorization_policy` interceptor runs + before the `post_get_authorization_policy_with_metadata` interceptor. + """ + return response + + def post_get_authorization_policy_with_metadata( + self, + response: authorization_policy.AuthorizationPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + authorization_policy.AuthorizationPolicy, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_authorization_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_get_authorization_policy_with_metadata` + interceptor in new development instead of the `post_get_authorization_policy` interceptor. + When both interceptors are used, this `post_get_authorization_policy_with_metadata` interceptor runs after the + `post_get_authorization_policy` interceptor. The (possibly modified) response returned by + `post_get_authorization_policy` will be passed to + `post_get_authorization_policy_with_metadata`. + """ + return response, metadata + + def pre_get_authz_policy( + self, + request: authz_policy.GetAuthzPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + authz_policy.GetAuthzPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_authz_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_authz_policy( + self, response: authz_policy.AuthzPolicy + ) -> authz_policy.AuthzPolicy: + """Post-rpc interceptor for get_authz_policy + + DEPRECATED. Please use the `post_get_authz_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_get_authz_policy` interceptor runs + before the `post_get_authz_policy_with_metadata` interceptor. 
+ """ + return response + + def post_get_authz_policy_with_metadata( + self, + response: authz_policy.AuthzPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[authz_policy.AuthzPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_authz_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_get_authz_policy_with_metadata` + interceptor in new development instead of the `post_get_authz_policy` interceptor. + When both interceptors are used, this `post_get_authz_policy_with_metadata` interceptor runs after the + `post_get_authz_policy` interceptor. The (possibly modified) response returned by + `post_get_authz_policy` will be passed to + `post_get_authz_policy_with_metadata`. + """ + return response, metadata + + def pre_get_backend_authentication_config( + self, + request: backend_authentication_config.GetBackendAuthenticationConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backend_authentication_config.GetBackendAuthenticationConfigRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_backend_authentication_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_backend_authentication_config( + self, response: backend_authentication_config.BackendAuthenticationConfig + ) -> backend_authentication_config.BackendAuthenticationConfig: + """Post-rpc interceptor for get_backend_authentication_config + + DEPRECATED. Please use the `post_get_backend_authentication_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_get_backend_authentication_config` interceptor runs + before the `post_get_backend_authentication_config_with_metadata` interceptor. + """ + return response + + def post_get_backend_authentication_config_with_metadata( + self, + response: backend_authentication_config.BackendAuthenticationConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backend_authentication_config.BackendAuthenticationConfig, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_backend_authentication_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_get_backend_authentication_config_with_metadata` + interceptor in new development instead of the `post_get_backend_authentication_config` interceptor. + When both interceptors are used, this `post_get_backend_authentication_config_with_metadata` interceptor runs after the + `post_get_backend_authentication_config` interceptor. The (possibly modified) response returned by + `post_get_backend_authentication_config` will be passed to + `post_get_backend_authentication_config_with_metadata`. 
+ """ + return response, metadata + + def pre_get_client_tls_policy( + self, + request: client_tls_policy.GetClientTlsPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + client_tls_policy.GetClientTlsPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_client_tls_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_client_tls_policy( + self, response: client_tls_policy.ClientTlsPolicy + ) -> client_tls_policy.ClientTlsPolicy: + """Post-rpc interceptor for get_client_tls_policy + + DEPRECATED. Please use the `post_get_client_tls_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_get_client_tls_policy` interceptor runs + before the `post_get_client_tls_policy_with_metadata` interceptor. + """ + return response + + def post_get_client_tls_policy_with_metadata( + self, + response: client_tls_policy.ClientTlsPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + client_tls_policy.ClientTlsPolicy, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_client_tls_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_get_client_tls_policy_with_metadata` + interceptor in new development instead of the `post_get_client_tls_policy` interceptor. + When both interceptors are used, this `post_get_client_tls_policy_with_metadata` interceptor runs after the + `post_get_client_tls_policy` interceptor. The (possibly modified) response returned by + `post_get_client_tls_policy` will be passed to + `post_get_client_tls_policy_with_metadata`. + """ + return response, metadata + + def pre_get_gateway_security_policy( + self, + request: gateway_security_policy.GetGatewaySecurityPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gateway_security_policy.GetGatewaySecurityPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_gateway_security_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_gateway_security_policy( + self, response: gateway_security_policy.GatewaySecurityPolicy + ) -> gateway_security_policy.GatewaySecurityPolicy: + """Post-rpc interceptor for get_gateway_security_policy + + DEPRECATED. Please use the `post_get_gateway_security_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_get_gateway_security_policy` interceptor runs + before the `post_get_gateway_security_policy_with_metadata` interceptor. 
+ """ + return response + + def post_get_gateway_security_policy_with_metadata( + self, + response: gateway_security_policy.GatewaySecurityPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gateway_security_policy.GatewaySecurityPolicy, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_gateway_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_get_gateway_security_policy_with_metadata` + interceptor in new development instead of the `post_get_gateway_security_policy` interceptor. + When both interceptors are used, this `post_get_gateway_security_policy_with_metadata` interceptor runs after the + `post_get_gateway_security_policy` interceptor. The (possibly modified) response returned by + `post_get_gateway_security_policy` will be passed to + `post_get_gateway_security_policy_with_metadata`. + """ + return response, metadata + + def pre_get_gateway_security_policy_rule( + self, + request: gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_gateway_security_policy_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_gateway_security_policy_rule( + self, response: gateway_security_policy_rule.GatewaySecurityPolicyRule + ) -> gateway_security_policy_rule.GatewaySecurityPolicyRule: + """Post-rpc interceptor for get_gateway_security_policy_rule + + DEPRECATED. Please use the `post_get_gateway_security_policy_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_get_gateway_security_policy_rule` interceptor runs + before the `post_get_gateway_security_policy_rule_with_metadata` interceptor. + """ + return response + + def post_get_gateway_security_policy_rule_with_metadata( + self, + response: gateway_security_policy_rule.GatewaySecurityPolicyRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gateway_security_policy_rule.GatewaySecurityPolicyRule, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_gateway_security_policy_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_get_gateway_security_policy_rule_with_metadata` + interceptor in new development instead of the `post_get_gateway_security_policy_rule` interceptor. + When both interceptors are used, this `post_get_gateway_security_policy_rule_with_metadata` interceptor runs after the + `post_get_gateway_security_policy_rule` interceptor. The (possibly modified) response returned by + `post_get_gateway_security_policy_rule` will be passed to + `post_get_gateway_security_policy_rule_with_metadata`. 
+ """ + return response, metadata + + def pre_get_server_tls_policy( + self, + request: server_tls_policy.GetServerTlsPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + server_tls_policy.GetServerTlsPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_server_tls_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_server_tls_policy( + self, response: server_tls_policy.ServerTlsPolicy + ) -> server_tls_policy.ServerTlsPolicy: + """Post-rpc interceptor for get_server_tls_policy + + DEPRECATED. Please use the `post_get_server_tls_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_get_server_tls_policy` interceptor runs + before the `post_get_server_tls_policy_with_metadata` interceptor. + """ + return response + + def post_get_server_tls_policy_with_metadata( + self, + response: server_tls_policy.ServerTlsPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + server_tls_policy.ServerTlsPolicy, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_server_tls_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_get_server_tls_policy_with_metadata` + interceptor in new development instead of the `post_get_server_tls_policy` interceptor. + When both interceptors are used, this `post_get_server_tls_policy_with_metadata` interceptor runs after the + `post_get_server_tls_policy` interceptor. The (possibly modified) response returned by + `post_get_server_tls_policy` will be passed to + `post_get_server_tls_policy_with_metadata`. + """ + return response, metadata + + def pre_get_tls_inspection_policy( + self, + request: tls_inspection_policy.GetTlsInspectionPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + tls_inspection_policy.GetTlsInspectionPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_tls_inspection_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_tls_inspection_policy( + self, response: tls_inspection_policy.TlsInspectionPolicy + ) -> tls_inspection_policy.TlsInspectionPolicy: + """Post-rpc interceptor for get_tls_inspection_policy + + DEPRECATED. Please use the `post_get_tls_inspection_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_get_tls_inspection_policy` interceptor runs + before the `post_get_tls_inspection_policy_with_metadata` interceptor. 
+ """ + return response + + def post_get_tls_inspection_policy_with_metadata( + self, + response: tls_inspection_policy.TlsInspectionPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + tls_inspection_policy.TlsInspectionPolicy, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_tls_inspection_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_get_tls_inspection_policy_with_metadata` + interceptor in new development instead of the `post_get_tls_inspection_policy` interceptor. + When both interceptors are used, this `post_get_tls_inspection_policy_with_metadata` interceptor runs after the + `post_get_tls_inspection_policy` interceptor. The (possibly modified) response returned by + `post_get_tls_inspection_policy` will be passed to + `post_get_tls_inspection_policy_with_metadata`. + """ + return response, metadata + + def pre_get_url_list( + self, + request: url_list.GetUrlListRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[url_list.GetUrlListRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_url_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_url_list(self, response: url_list.UrlList) -> url_list.UrlList: + """Post-rpc interceptor for get_url_list + + DEPRECATED. Please use the `post_get_url_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_get_url_list` interceptor runs + before the `post_get_url_list_with_metadata` interceptor. + """ + return response + + def post_get_url_list_with_metadata( + self, + response: url_list.UrlList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[url_list.UrlList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_url_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_get_url_list_with_metadata` + interceptor in new development instead of the `post_get_url_list` interceptor. + When both interceptors are used, this `post_get_url_list_with_metadata` interceptor runs after the + `post_get_url_list` interceptor. The (possibly modified) response returned by + `post_get_url_list` will be passed to + `post_get_url_list_with_metadata`. + """ + return response, metadata + + def pre_list_authorization_policies( + self, + request: authorization_policy.ListAuthorizationPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + authorization_policy.ListAuthorizationPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_authorization_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_list_authorization_policies( + self, response: authorization_policy.ListAuthorizationPoliciesResponse + ) -> authorization_policy.ListAuthorizationPoliciesResponse: + """Post-rpc interceptor for list_authorization_policies + + DEPRECATED. 
Please use the `post_list_authorization_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_list_authorization_policies` interceptor runs + before the `post_list_authorization_policies_with_metadata` interceptor. + """ + return response + + def post_list_authorization_policies_with_metadata( + self, + response: authorization_policy.ListAuthorizationPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + authorization_policy.ListAuthorizationPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_authorization_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_list_authorization_policies_with_metadata` + interceptor in new development instead of the `post_list_authorization_policies` interceptor. + When both interceptors are used, this `post_list_authorization_policies_with_metadata` interceptor runs after the + `post_list_authorization_policies` interceptor. The (possibly modified) response returned by + `post_list_authorization_policies` will be passed to + `post_list_authorization_policies_with_metadata`. + """ + return response, metadata + + def pre_list_authz_policies( + self, + request: authz_policy.ListAuthzPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + authz_policy.ListAuthzPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_authz_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_list_authz_policies( + self, response: authz_policy.ListAuthzPoliciesResponse + ) -> authz_policy.ListAuthzPoliciesResponse: + """Post-rpc interceptor for list_authz_policies + + DEPRECATED. Please use the `post_list_authz_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_list_authz_policies` interceptor runs + before the `post_list_authz_policies_with_metadata` interceptor. + """ + return response + + def post_list_authz_policies_with_metadata( + self, + response: authz_policy.ListAuthzPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + authz_policy.ListAuthzPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_authz_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_list_authz_policies_with_metadata` + interceptor in new development instead of the `post_list_authz_policies` interceptor. + When both interceptors are used, this `post_list_authz_policies_with_metadata` interceptor runs after the + `post_list_authz_policies` interceptor. The (possibly modified) response returned by + `post_list_authz_policies` will be passed to + `post_list_authz_policies_with_metadata`. 
+ """ + return response, metadata + + def pre_list_backend_authentication_configs( + self, + request: backend_authentication_config.ListBackendAuthenticationConfigsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backend_authentication_config.ListBackendAuthenticationConfigsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_backend_authentication_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_list_backend_authentication_configs( + self, + response: backend_authentication_config.ListBackendAuthenticationConfigsResponse, + ) -> backend_authentication_config.ListBackendAuthenticationConfigsResponse: + """Post-rpc interceptor for list_backend_authentication_configs + + DEPRECATED. Please use the `post_list_backend_authentication_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_list_backend_authentication_configs` interceptor runs + before the `post_list_backend_authentication_configs_with_metadata` interceptor. + """ + return response + + def post_list_backend_authentication_configs_with_metadata( + self, + response: backend_authentication_config.ListBackendAuthenticationConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backend_authentication_config.ListBackendAuthenticationConfigsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_backend_authentication_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_list_backend_authentication_configs_with_metadata` + interceptor in new development instead of the `post_list_backend_authentication_configs` interceptor. + When both interceptors are used, this `post_list_backend_authentication_configs_with_metadata` interceptor runs after the + `post_list_backend_authentication_configs` interceptor. The (possibly modified) response returned by + `post_list_backend_authentication_configs` will be passed to + `post_list_backend_authentication_configs_with_metadata`. + """ + return response, metadata + + def pre_list_client_tls_policies( + self, + request: client_tls_policy.ListClientTlsPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + client_tls_policy.ListClientTlsPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_client_tls_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_list_client_tls_policies( + self, response: client_tls_policy.ListClientTlsPoliciesResponse + ) -> client_tls_policy.ListClientTlsPoliciesResponse: + """Post-rpc interceptor for list_client_tls_policies + + DEPRECATED. Please use the `post_list_client_tls_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_list_client_tls_policies` interceptor runs + before the `post_list_client_tls_policies_with_metadata` interceptor. 
+ """ + return response + + def post_list_client_tls_policies_with_metadata( + self, + response: client_tls_policy.ListClientTlsPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + client_tls_policy.ListClientTlsPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_client_tls_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_list_client_tls_policies_with_metadata` + interceptor in new development instead of the `post_list_client_tls_policies` interceptor. + When both interceptors are used, this `post_list_client_tls_policies_with_metadata` interceptor runs after the + `post_list_client_tls_policies` interceptor. The (possibly modified) response returned by + `post_list_client_tls_policies` will be passed to + `post_list_client_tls_policies_with_metadata`. + """ + return response, metadata + + def pre_list_gateway_security_policies( + self, + request: gateway_security_policy.ListGatewaySecurityPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gateway_security_policy.ListGatewaySecurityPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_gateway_security_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_list_gateway_security_policies( + self, response: gateway_security_policy.ListGatewaySecurityPoliciesResponse + ) -> gateway_security_policy.ListGatewaySecurityPoliciesResponse: + """Post-rpc interceptor for list_gateway_security_policies + + DEPRECATED. Please use the `post_list_gateway_security_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_list_gateway_security_policies` interceptor runs + before the `post_list_gateway_security_policies_with_metadata` interceptor. + """ + return response + + def post_list_gateway_security_policies_with_metadata( + self, + response: gateway_security_policy.ListGatewaySecurityPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gateway_security_policy.ListGatewaySecurityPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_gateway_security_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_list_gateway_security_policies_with_metadata` + interceptor in new development instead of the `post_list_gateway_security_policies` interceptor. + When both interceptors are used, this `post_list_gateway_security_policies_with_metadata` interceptor runs after the + `post_list_gateway_security_policies` interceptor. The (possibly modified) response returned by + `post_list_gateway_security_policies` will be passed to + `post_list_gateway_security_policies_with_metadata`. 
+ """ + return response, metadata + + def pre_list_gateway_security_policy_rules( + self, + request: gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_gateway_security_policy_rules + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_list_gateway_security_policy_rules( + self, + response: gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse, + ) -> gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse: + """Post-rpc interceptor for list_gateway_security_policy_rules + + DEPRECATED. Please use the `post_list_gateway_security_policy_rules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_list_gateway_security_policy_rules` interceptor runs + before the `post_list_gateway_security_policy_rules_with_metadata` interceptor. + """ + return response + + def post_list_gateway_security_policy_rules_with_metadata( + self, + response: gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_gateway_security_policy_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_list_gateway_security_policy_rules_with_metadata` + interceptor in new development instead of the `post_list_gateway_security_policy_rules` interceptor. + When both interceptors are used, this `post_list_gateway_security_policy_rules_with_metadata` interceptor runs after the + `post_list_gateway_security_policy_rules` interceptor. The (possibly modified) response returned by + `post_list_gateway_security_policy_rules` will be passed to + `post_list_gateway_security_policy_rules_with_metadata`. + """ + return response, metadata + + def pre_list_server_tls_policies( + self, + request: server_tls_policy.ListServerTlsPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + server_tls_policy.ListServerTlsPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_server_tls_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_list_server_tls_policies( + self, response: server_tls_policy.ListServerTlsPoliciesResponse + ) -> server_tls_policy.ListServerTlsPoliciesResponse: + """Post-rpc interceptor for list_server_tls_policies + + DEPRECATED. Please use the `post_list_server_tls_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_list_server_tls_policies` interceptor runs + before the `post_list_server_tls_policies_with_metadata` interceptor. 
+ """ + return response + + def post_list_server_tls_policies_with_metadata( + self, + response: server_tls_policy.ListServerTlsPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + server_tls_policy.ListServerTlsPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_server_tls_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_list_server_tls_policies_with_metadata` + interceptor in new development instead of the `post_list_server_tls_policies` interceptor. + When both interceptors are used, this `post_list_server_tls_policies_with_metadata` interceptor runs after the + `post_list_server_tls_policies` interceptor. The (possibly modified) response returned by + `post_list_server_tls_policies` will be passed to + `post_list_server_tls_policies_with_metadata`. + """ + return response, metadata + + def pre_list_tls_inspection_policies( + self, + request: tls_inspection_policy.ListTlsInspectionPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + tls_inspection_policy.ListTlsInspectionPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_tls_inspection_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_list_tls_inspection_policies( + self, response: tls_inspection_policy.ListTlsInspectionPoliciesResponse + ) -> tls_inspection_policy.ListTlsInspectionPoliciesResponse: + """Post-rpc interceptor for list_tls_inspection_policies + + DEPRECATED. Please use the `post_list_tls_inspection_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_list_tls_inspection_policies` interceptor runs + before the `post_list_tls_inspection_policies_with_metadata` interceptor. + """ + return response + + def post_list_tls_inspection_policies_with_metadata( + self, + response: tls_inspection_policy.ListTlsInspectionPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + tls_inspection_policy.ListTlsInspectionPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_tls_inspection_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_list_tls_inspection_policies_with_metadata` + interceptor in new development instead of the `post_list_tls_inspection_policies` interceptor. + When both interceptors are used, this `post_list_tls_inspection_policies_with_metadata` interceptor runs after the + `post_list_tls_inspection_policies` interceptor. The (possibly modified) response returned by + `post_list_tls_inspection_policies` will be passed to + `post_list_tls_inspection_policies_with_metadata`. 
+ """ + return response, metadata + + def pre_list_url_lists( + self, + request: url_list.ListUrlListsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[url_list.ListUrlListsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_url_lists + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_list_url_lists( + self, response: url_list.ListUrlListsResponse + ) -> url_list.ListUrlListsResponse: + """Post-rpc interceptor for list_url_lists + + DEPRECATED. Please use the `post_list_url_lists_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_list_url_lists` interceptor runs + before the `post_list_url_lists_with_metadata` interceptor. + """ + return response + + def post_list_url_lists_with_metadata( + self, + response: url_list.ListUrlListsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[url_list.ListUrlListsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_url_lists + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_list_url_lists_with_metadata` + interceptor in new development instead of the `post_list_url_lists` interceptor. + When both interceptors are used, this `post_list_url_lists_with_metadata` interceptor runs after the + `post_list_url_lists` interceptor. The (possibly modified) response returned by + `post_list_url_lists` will be passed to + `post_list_url_lists_with_metadata`. + """ + return response, metadata + + def pre_update_authorization_policy( + self, + request: gcn_authorization_policy.UpdateAuthorizationPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_authorization_policy.UpdateAuthorizationPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_authorization_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_update_authorization_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_authorization_policy + + DEPRECATED. Please use the `post_update_authorization_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_update_authorization_policy` interceptor runs + before the `post_update_authorization_policy_with_metadata` interceptor. + """ + return response + + def post_update_authorization_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_authorization_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. 
+ + We recommend only using this `post_update_authorization_policy_with_metadata` + interceptor in new development instead of the `post_update_authorization_policy` interceptor. + When both interceptors are used, this `post_update_authorization_policy_with_metadata` interceptor runs after the + `post_update_authorization_policy` interceptor. The (possibly modified) response returned by + `post_update_authorization_policy` will be passed to + `post_update_authorization_policy_with_metadata`. + """ + return response, metadata + + def pre_update_authz_policy( + self, + request: gcn_authz_policy.UpdateAuthzPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_authz_policy.UpdateAuthzPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_authz_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_update_authz_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_authz_policy + + DEPRECATED. Please use the `post_update_authz_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_update_authz_policy` interceptor runs + before the `post_update_authz_policy_with_metadata` interceptor. + """ + return response + + def post_update_authz_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_authz_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_update_authz_policy_with_metadata` + interceptor in new development instead of the `post_update_authz_policy` interceptor. + When both interceptors are used, this `post_update_authz_policy_with_metadata` interceptor runs after the + `post_update_authz_policy` interceptor. The (possibly modified) response returned by + `post_update_authz_policy` will be passed to + `post_update_authz_policy_with_metadata`. + """ + return response, metadata + + def pre_update_backend_authentication_config( + self, + request: gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_backend_authentication_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_update_backend_authentication_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backend_authentication_config + + DEPRECATED. Please use the `post_update_backend_authentication_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. 
This `post_update_backend_authentication_config` interceptor runs + before the `post_update_backend_authentication_config_with_metadata` interceptor. + """ + return response + + def post_update_backend_authentication_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backend_authentication_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_update_backend_authentication_config_with_metadata` + interceptor in new development instead of the `post_update_backend_authentication_config` interceptor. + When both interceptors are used, this `post_update_backend_authentication_config_with_metadata` interceptor runs after the + `post_update_backend_authentication_config` interceptor. The (possibly modified) response returned by + `post_update_backend_authentication_config` will be passed to + `post_update_backend_authentication_config_with_metadata`. + """ + return response, metadata + + def pre_update_client_tls_policy( + self, + request: gcn_client_tls_policy.UpdateClientTlsPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_client_tls_policy.UpdateClientTlsPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_client_tls_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_update_client_tls_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_client_tls_policy + + DEPRECATED. Please use the `post_update_client_tls_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_update_client_tls_policy` interceptor runs + before the `post_update_client_tls_policy_with_metadata` interceptor. + """ + return response + + def post_update_client_tls_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_client_tls_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_update_client_tls_policy_with_metadata` + interceptor in new development instead of the `post_update_client_tls_policy` interceptor. + When both interceptors are used, this `post_update_client_tls_policy_with_metadata` interceptor runs after the + `post_update_client_tls_policy` interceptor. The (possibly modified) response returned by + `post_update_client_tls_policy` will be passed to + `post_update_client_tls_policy_with_metadata`. 
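+
+        Because ``update_client_tls_policy`` is a long-running operation, the
+        ``response`` here is a raw ``operations_pb2.Operation``. A sketch of a
+        subclass that inspects it (the subclass name is illustrative)::
+
+            class OperationLoggingInterceptor(NetworkSecurityRestInterceptor):
+                def post_update_client_tls_policy_with_metadata(
+                    self, response, metadata
+                ):
+                    # ``response.name`` identifies the operation and
+                    # ``response.done`` reports whether it already completed.
+                    print(response.name, response.done)
+                    return response, metadata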
+ """ + return response, metadata + + def pre_update_gateway_security_policy( + self, + request: gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_gateway_security_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_update_gateway_security_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_gateway_security_policy + + DEPRECATED. Please use the `post_update_gateway_security_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_update_gateway_security_policy` interceptor runs + before the `post_update_gateway_security_policy_with_metadata` interceptor. + """ + return response + + def post_update_gateway_security_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_gateway_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_update_gateway_security_policy_with_metadata` + interceptor in new development instead of the `post_update_gateway_security_policy` interceptor. + When both interceptors are used, this `post_update_gateway_security_policy_with_metadata` interceptor runs after the + `post_update_gateway_security_policy` interceptor. The (possibly modified) response returned by + `post_update_gateway_security_policy` will be passed to + `post_update_gateway_security_policy_with_metadata`. + """ + return response, metadata + + def pre_update_gateway_security_policy_rule( + self, + request: gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_gateway_security_policy_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_update_gateway_security_policy_rule( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_gateway_security_policy_rule + + DEPRECATED. Please use the `post_update_gateway_security_policy_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_update_gateway_security_policy_rule` interceptor runs + before the `post_update_gateway_security_policy_rule_with_metadata` interceptor. 
+ """ + return response + + def post_update_gateway_security_policy_rule_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_gateway_security_policy_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_update_gateway_security_policy_rule_with_metadata` + interceptor in new development instead of the `post_update_gateway_security_policy_rule` interceptor. + When both interceptors are used, this `post_update_gateway_security_policy_rule_with_metadata` interceptor runs after the + `post_update_gateway_security_policy_rule` interceptor. The (possibly modified) response returned by + `post_update_gateway_security_policy_rule` will be passed to + `post_update_gateway_security_policy_rule_with_metadata`. + """ + return response, metadata + + def pre_update_server_tls_policy( + self, + request: gcn_server_tls_policy.UpdateServerTlsPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_server_tls_policy.UpdateServerTlsPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_server_tls_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_update_server_tls_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_server_tls_policy + + DEPRECATED. Please use the `post_update_server_tls_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_update_server_tls_policy` interceptor runs + before the `post_update_server_tls_policy_with_metadata` interceptor. + """ + return response + + def post_update_server_tls_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_server_tls_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_update_server_tls_policy_with_metadata` + interceptor in new development instead of the `post_update_server_tls_policy` interceptor. + When both interceptors are used, this `post_update_server_tls_policy_with_metadata` interceptor runs after the + `post_update_server_tls_policy` interceptor. The (possibly modified) response returned by + `post_update_server_tls_policy` will be passed to + `post_update_server_tls_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_update_tls_inspection_policy( + self, + request: gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_tls_inspection_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_update_tls_inspection_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_tls_inspection_policy + + DEPRECATED. Please use the `post_update_tls_inspection_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_update_tls_inspection_policy` interceptor runs + before the `post_update_tls_inspection_policy_with_metadata` interceptor. + """ + return response + + def post_update_tls_inspection_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_tls_inspection_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. + + We recommend only using this `post_update_tls_inspection_policy_with_metadata` + interceptor in new development instead of the `post_update_tls_inspection_policy` interceptor. + When both interceptors are used, this `post_update_tls_inspection_policy_with_metadata` interceptor runs after the + `post_update_tls_inspection_policy` interceptor. The (possibly modified) response returned by + `post_update_tls_inspection_policy` will be passed to + `post_update_tls_inspection_policy_with_metadata`. + """ + return response, metadata + + def pre_update_url_list( + self, + request: gcn_url_list.UpdateUrlListRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_url_list.UpdateUrlListRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_url_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_update_url_list( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_url_list + + DEPRECATED. Please use the `post_update_url_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. This `post_update_url_list` interceptor runs + before the `post_update_url_list_with_metadata` interceptor. + """ + return response + + def post_update_url_list_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_url_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkSecurity server but before it is returned to user code. 
+ + We recommend only using this `post_update_url_list_with_metadata` + interceptor in new development instead of the `post_update_url_list` interceptor. + When both interceptors are used, this `post_update_url_list_with_metadata` interceptor runs after the + `post_update_url_list` interceptor. The (possibly modified) response returned by + `post_update_url_list` will be passed to + `post_update_url_list_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. 
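+
+        A sketch of a subclass that reads the returned policy (the subclass
+        name is illustrative)::
+
+            class EtagRecordingInterceptor(NetworkSecurityRestInterceptor):
+                def post_set_iam_policy(self, response):
+                    # ``response`` is an IAM ``Policy``; its ``etag`` can be kept
+                    # for later concurrency checks.
+                    print(response.etag)
+                    return response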
+ """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkSecurity server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the NetworkSecurity server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class NetworkSecurityRestStub: + _session: AuthorizedSession + _host: str + _interceptor: NetworkSecurityRestInterceptor + + +class NetworkSecurityRestTransport(_BaseNetworkSecurityRestTransport): + """REST backend synchronous transport for NetworkSecurity. + + Network Security API provides resources to configure + authentication and authorization policies. Refer to per API + resource documentation for more information. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[NetworkSecurityRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or NetworkSecurityRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ { - "method": "post", - "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", - "body": "*", + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateAuthorizationPolicy( + _BaseNetworkSecurityRestTransport._BaseCreateAuthorizationPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.CreateAuthorizationPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_authorization_policy.CreateAuthorizationPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create authorization + policy method over HTTP. + + Args: + request (~.gcn_authorization_policy.CreateAuthorizationPolicyRequest): + The request object. Request used by the + CreateAuthorizationPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseCreateAuthorizationPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_authorization_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseCreateAuthorizationPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseCreateAuthorizationPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseCreateAuthorizationPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.CreateAuthorizationPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateAuthorizationPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._CreateAuthorizationPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_authorization_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_authorization_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.create_authorization_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateAuthorizationPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateAuthzPolicy( + _BaseNetworkSecurityRestTransport._BaseCreateAuthzPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.CreateAuthzPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_authz_policy.CreateAuthzPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create authz policy method over HTTP. + + Args: + request (~.gcn_authz_policy.CreateAuthzPolicyRequest): + The request object. Message for creating an ``AuthzPolicy`` resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseCreateAuthzPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_authz_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseCreateAuthzPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseCreateAuthzPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseCreateAuthzPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.CreateAuthzPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateAuthzPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._CreateAuthzPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
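+ # (for example, a 404 becomes core_exceptions.NotFound and a 403 becomes
+ # core_exceptions.PermissionDenied).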
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_authz_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_authz_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.create_authz_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateAuthzPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateBackendAuthenticationConfig( + _BaseNetworkSecurityRestTransport._BaseCreateBackendAuthenticationConfig, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash( + "NetworkSecurityRestTransport.CreateBackendAuthenticationConfig" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backend + authentication config method over HTTP. + + Args: + request (~.gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest): + The request object. Request used by the + CreateBackendAuthenticationConfig + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseCreateBackendAuthenticationConfig._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_create_backend_authentication_config( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseCreateBackendAuthenticationConfig._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseCreateBackendAuthenticationConfig._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseCreateBackendAuthenticationConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.CreateBackendAuthenticationConfig", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateBackendAuthenticationConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._CreateBackendAuthenticationConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_backend_authentication_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_backend_authentication_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.create_backend_authentication_config", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateBackendAuthenticationConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateClientTlsPolicy( + _BaseNetworkSecurityRestTransport._BaseCreateClientTlsPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.CreateClientTlsPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_client_tls_policy.CreateClientTlsPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create client tls policy method over HTTP. + + Args: + request (~.gcn_client_tls_policy.CreateClientTlsPolicyRequest): + The request object. Request used by the + CreateClientTlsPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseCreateClientTlsPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_client_tls_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseCreateClientTlsPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseCreateClientTlsPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseCreateClientTlsPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.CreateClientTlsPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateClientTlsPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._CreateClientTlsPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_client_tls_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_client_tls_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.create_client_tls_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateClientTlsPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateGatewaySecurityPolicy( + _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.CreateGatewaySecurityPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create gateway security + policy method over HTTP. + + Args: + request (~.gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest): + The request object. Request used by the + CreateGatewaySecurityPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_gateway_security_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.CreateGatewaySecurityPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateGatewaySecurityPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._CreateGatewaySecurityPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_gateway_security_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_gateway_security_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.create_gateway_security_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateGatewaySecurityPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateGatewaySecurityPolicyRule( + _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicyRule, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.CreateGatewaySecurityPolicyRule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create gateway security + policy rule method over HTTP. + + Args: + request (~.gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest): + The request object. Methods for GatewaySecurityPolicy + RULES/GatewaySecurityPolicyRules. + Request used by the + CreateGatewaySecurityPolicyRule method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicyRule._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_create_gateway_security_policy_rule( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicyRule._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicyRule._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicyRule._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.CreateGatewaySecurityPolicyRule", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateGatewaySecurityPolicyRule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._CreateGatewaySecurityPolicyRule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_gateway_security_policy_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_gateway_security_policy_rule_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.create_gateway_security_policy_rule", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateGatewaySecurityPolicyRule", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateServerTlsPolicy( + _BaseNetworkSecurityRestTransport._BaseCreateServerTlsPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.CreateServerTlsPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_server_tls_policy.CreateServerTlsPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create server tls policy method over HTTP. + + Args: + request (~.gcn_server_tls_policy.CreateServerTlsPolicyRequest): + The request object. Request used by the + CreateServerTlsPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseCreateServerTlsPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_server_tls_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseCreateServerTlsPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseCreateServerTlsPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseCreateServerTlsPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.CreateServerTlsPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateServerTlsPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._CreateServerTlsPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_server_tls_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_server_tls_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.create_server_tls_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateServerTlsPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateTlsInspectionPolicy( + _BaseNetworkSecurityRestTransport._BaseCreateTlsInspectionPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.CreateTlsInspectionPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create tls inspection + policy method over HTTP. + + Args: + request (~.gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest): + The request object. Request used by the + CreateTlsInspectionPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseCreateTlsInspectionPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_tls_inspection_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseCreateTlsInspectionPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseCreateTlsInspectionPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseCreateTlsInspectionPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.CreateTlsInspectionPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateTlsInspectionPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._CreateTlsInspectionPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_tls_inspection_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_tls_inspection_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.create_tls_inspection_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateTlsInspectionPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateUrlList( + _BaseNetworkSecurityRestTransport._BaseCreateUrlList, NetworkSecurityRestStub + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.CreateUrlList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_url_list.CreateUrlListRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create url list method over HTTP. + + Args: + request (~.gcn_url_list.CreateUrlListRequest): + The request object. Request used by the CreateUrlList + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseCreateUrlList._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_url_list(request, metadata) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseCreateUrlList._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseCreateUrlList._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseCreateUrlList._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.CreateUrlList", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateUrlList", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._CreateUrlList._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_url_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_url_list_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.create_url_list", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "CreateUrlList", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteAuthorizationPolicy( + _BaseNetworkSecurityRestTransport._BaseDeleteAuthorizationPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.DeleteAuthorizationPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
authorization_policy.DeleteAuthorizationPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete authorization + policy method over HTTP. + + Args: + request (~.authorization_policy.DeleteAuthorizationPolicyRequest): + The request object. Request used by the + DeleteAuthorizationPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseDeleteAuthorizationPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_authorization_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseDeleteAuthorizationPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseDeleteAuthorizationPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.DeleteAuthorizationPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteAuthorizationPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._DeleteAuthorizationPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_authorization_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_authorization_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.delete_authorization_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteAuthorizationPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteAuthzPolicy( + _BaseNetworkSecurityRestTransport._BaseDeleteAuthzPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.DeleteAuthzPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: authz_policy.DeleteAuthzPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete authz policy method over HTTP. + + Args: + request (~.authz_policy.DeleteAuthzPolicyRequest): + The request object. Message for deleting an ``AuthzPolicy`` resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseDeleteAuthzPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_authz_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseDeleteAuthzPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseDeleteAuthzPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.DeleteAuthzPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteAuthzPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._DeleteAuthzPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_authz_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_authz_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.delete_authz_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteAuthzPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteBackendAuthenticationConfig( + _BaseNetworkSecurityRestTransport._BaseDeleteBackendAuthenticationConfig, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash( + "NetworkSecurityRestTransport.DeleteBackendAuthenticationConfig" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: backend_authentication_config.DeleteBackendAuthenticationConfigRequest, + 
*, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backend + authentication config method over HTTP. + + Args: + request (~.backend_authentication_config.DeleteBackendAuthenticationConfigRequest): + The request object. Request used by the + DeleteBackendAuthenticationConfig + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseDeleteBackendAuthenticationConfig._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_delete_backend_authentication_config( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseDeleteBackendAuthenticationConfig._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseDeleteBackendAuthenticationConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.DeleteBackendAuthenticationConfig", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteBackendAuthenticationConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._DeleteBackendAuthenticationConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_backend_authentication_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_backend_authentication_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.delete_backend_authentication_config", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteBackendAuthenticationConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteClientTlsPolicy( + _BaseNetworkSecurityRestTransport._BaseDeleteClientTlsPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.DeleteClientTlsPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: client_tls_policy.DeleteClientTlsPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete client tls policy method over HTTP. + + Args: + request (~.client_tls_policy.DeleteClientTlsPolicyRequest): + The request object. Request used by the + DeleteClientTlsPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseDeleteClientTlsPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_client_tls_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseDeleteClientTlsPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseDeleteClientTlsPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.DeleteClientTlsPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteClientTlsPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._DeleteClientTlsPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_client_tls_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_client_tls_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.delete_client_tls_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteClientTlsPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteGatewaySecurityPolicy( + _BaseNetworkSecurityRestTransport._BaseDeleteGatewaySecurityPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.DeleteGatewaySecurityPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
gateway_security_policy.DeleteGatewaySecurityPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete gateway security + policy method over HTTP. + + Args: + request (~.gateway_security_policy.DeleteGatewaySecurityPolicyRequest): + The request object. Request used by the + DeleteGatewaySecurityPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseDeleteGatewaySecurityPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_gateway_security_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseDeleteGatewaySecurityPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseDeleteGatewaySecurityPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.DeleteGatewaySecurityPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteGatewaySecurityPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._DeleteGatewaySecurityPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_gateway_security_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_gateway_security_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.delete_gateway_security_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteGatewaySecurityPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteGatewaySecurityPolicyRule( + _BaseNetworkSecurityRestTransport._BaseDeleteGatewaySecurityPolicyRule, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.DeleteGatewaySecurityPolicyRule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete gateway security + policy rule method over HTTP. + + Args: + request (~.gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest): + The request object. Request used by the + DeleteGatewaySecurityPolicyRule method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseDeleteGatewaySecurityPolicyRule._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_delete_gateway_security_policy_rule( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseDeleteGatewaySecurityPolicyRule._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseDeleteGatewaySecurityPolicyRule._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.DeleteGatewaySecurityPolicyRule", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteGatewaySecurityPolicyRule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._DeleteGatewaySecurityPolicyRule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_gateway_security_policy_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_gateway_security_policy_rule_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.delete_gateway_security_policy_rule", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteGatewaySecurityPolicyRule", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteServerTlsPolicy( + _BaseNetworkSecurityRestTransport._BaseDeleteServerTlsPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.DeleteServerTlsPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, 
strict=True), + ) + return response + + def __call__( + self, + request: server_tls_policy.DeleteServerTlsPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete server tls policy method over HTTP. + + Args: + request (~.server_tls_policy.DeleteServerTlsPolicyRequest): + The request object. Request used by the + DeleteServerTlsPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseDeleteServerTlsPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_server_tls_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseDeleteServerTlsPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseDeleteServerTlsPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.DeleteServerTlsPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteServerTlsPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._DeleteServerTlsPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
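+            # Note: core_exceptions.from_http_response derives the concrete
+            # GoogleAPICallError subclass from the HTTP status code and the JSON error
+            # payload, so callers get a typed exception (for example, a 404 surfaces as
+            # core_exceptions.NotFound, while unrecognized status codes fall back to a
+            # generic GoogleAPICallError).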
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_server_tls_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_server_tls_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.delete_server_tls_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteServerTlsPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteTlsInspectionPolicy( + _BaseNetworkSecurityRestTransport._BaseDeleteTlsInspectionPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.DeleteTlsInspectionPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: tls_inspection_policy.DeleteTlsInspectionPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete tls inspection + policy method over HTTP. + + Args: + request (~.tls_inspection_policy.DeleteTlsInspectionPolicyRequest): + The request object. Request used by the + DeleteTlsInspectionPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseDeleteTlsInspectionPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_tls_inspection_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseDeleteTlsInspectionPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseDeleteTlsInspectionPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.DeleteTlsInspectionPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteTlsInspectionPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._DeleteTlsInspectionPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_tls_inspection_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_tls_inspection_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.delete_tls_inspection_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteTlsInspectionPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteUrlList( + _BaseNetworkSecurityRestTransport._BaseDeleteUrlList, NetworkSecurityRestStub + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.DeleteUrlList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: url_list.DeleteUrlListRequest, + *, + 
retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete url list method over HTTP. + + Args: + request (~.url_list.DeleteUrlListRequest): + The request object. Request used by the DeleteUrlList + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseDeleteUrlList._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_url_list(request, metadata) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseDeleteUrlList._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseDeleteUrlList._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.DeleteUrlList", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteUrlList", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._DeleteUrlList._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
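+            # Note on the return value: the body parsed below is a raw longrunning
+            # operations_pb2.Operation. The public delete_url_list client method wraps it
+            # in a google.api_core.operation.Operation future, so a typical caller (a
+            # minimal sketch; the client variable and resource name are illustrative) does:
+            #
+            #     lro = client.delete_url_list(
+            #         name="projects/my-project/locations/us-central1/urlLists/my-list"
+            #     )
+            #     lro.result()  # blocks until the server-side delete completes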
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_url_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_url_list_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.delete_url_list", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "DeleteUrlList", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetAuthorizationPolicy( + _BaseNetworkSecurityRestTransport._BaseGetAuthorizationPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.GetAuthorizationPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: authorization_policy.GetAuthorizationPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> authorization_policy.AuthorizationPolicy: + r"""Call the get authorization policy method over HTTP. + + Args: + request (~.authorization_policy.GetAuthorizationPolicyRequest): + The request object. Request used by the + GetAuthorizationPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.authorization_policy.AuthorizationPolicy: + AuthorizationPolicy is a resource + that specifies how a server should + authorize incoming connections. This + resource in itself does not change the + configuration unless it's attached to a + target https proxy or endpoint config + selector resource. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseGetAuthorizationPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_authorization_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseGetAuthorizationPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseGetAuthorizationPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.GetAuthorizationPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetAuthorizationPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._GetAuthorizationPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = authorization_policy.AuthorizationPolicy() + pb_resp = authorization_policy.AuthorizationPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_authorization_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_authorization_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = authorization_policy.AuthorizationPolicy.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.get_authorization_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetAuthorizationPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetAuthzPolicy( + _BaseNetworkSecurityRestTransport._BaseGetAuthzPolicy, NetworkSecurityRestStub + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.GetAuthzPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + 
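+        # Usage sketch (illustrative; the module path follows this package's layout and
+        # the resource name is a placeholder): this stub is normally reached through the
+        # public client rather than being invoked directly, e.g.:
+        #
+        #     from google.cloud import network_security_v1alpha1
+        #
+        #     client = network_security_v1alpha1.NetworkSecurityClient(transport="rest")
+        #     policy = client.get_authz_policy(
+        #         name="projects/my-project/locations/us-central1/authzPolicies/my-policy"
+        #     )
+        #
+        # The client routes the RPC to __call__ below, which transcodes the request,
+        # sends it over HTTP, and parses the JSON response into an AuthzPolicy message.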
+        def __call__(
+            self,
+            request: authz_policy.GetAuthzPolicyRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> authz_policy.AuthzPolicy:
+            r"""Call the get authz policy method over HTTP.
+
+            Args:
+                request (~.authz_policy.GetAuthzPolicyRequest):
+                    The request object. Message for getting an ``AuthzPolicy`` resource.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.authz_policy.AuthzPolicy:
+                    ``AuthzPolicy`` is a resource that allows forwarding
+                    traffic to a callout backend designed to scan the
+                    traffic for security purposes.
+
+            """
+
+            http_options = (
+                _BaseNetworkSecurityRestTransport._BaseGetAuthzPolicy._get_http_options()
+            )
+
+            request, metadata = self._interceptor.pre_get_authz_policy(
+                request, metadata
+            )
+            transcoded_request = _BaseNetworkSecurityRestTransport._BaseGetAuthzPolicy._get_transcoded_request(
+                http_options, request
+            )
+
+            # Jsonify the query params
+            query_params = _BaseNetworkSecurityRestTransport._BaseGetAuthzPolicy._get_query_params_json(
+                transcoded_request
+            )
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+                logging.DEBUG
+            ):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(
+                    host=self._host, uri=transcoded_request["uri"]
+                )
+                method = transcoded_request["method"]
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.GetAuthzPolicy",
+                    extra={
+                        "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity",
+                        "rpcName": "GetAuthzPolicy",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = NetworkSecurityRestTransport._GetAuthzPolicy._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = authz_policy.AuthzPolicy() + pb_resp = authz_policy.AuthzPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_authz_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_authz_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = authz_policy.AuthzPolicy.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.get_authz_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetAuthzPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetBackendAuthenticationConfig( + _BaseNetworkSecurityRestTransport._BaseGetBackendAuthenticationConfig, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.GetBackendAuthenticationConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: backend_authentication_config.GetBackendAuthenticationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backend_authentication_config.BackendAuthenticationConfig: + r"""Call the get backend + authentication config method over HTTP. + + Args: + request (~.backend_authentication_config.GetBackendAuthenticationConfigRequest): + The request object. Request used by the + GetBackendAuthenticationConfig method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backend_authentication_config.BackendAuthenticationConfig: + BackendAuthenticationConfig message groups the + TrustConfig together with other settings that control + how the load balancer authenticates, and expresses its + identity to, the backend: + + - ``trustConfig`` is the attached TrustConfig. + + - ``wellKnownRoots`` indicates whether the load balance + should trust backend server certificates that are + issued by public certificate authorities, in addition + to certificates trusted by the TrustConfig. 
+ + - ``clientCertificate`` is a client certificate that the + load balancer uses to express its identity to the + backend, if the connection to the backend uses mTLS. + + You can attach the BackendAuthenticationConfig to the + load balancer's BackendService directly determining how + that BackendService negotiates TLS. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseGetBackendAuthenticationConfig._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_backend_authentication_config( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseGetBackendAuthenticationConfig._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseGetBackendAuthenticationConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.GetBackendAuthenticationConfig", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetBackendAuthenticationConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._GetBackendAuthenticationConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backend_authentication_config.BackendAuthenticationConfig() + pb_resp = backend_authentication_config.BackendAuthenticationConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_backend_authentication_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_backend_authentication_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = backend_authentication_config.BackendAuthenticationConfig.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.get_backend_authentication_config", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetBackendAuthenticationConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetClientTlsPolicy( + _BaseNetworkSecurityRestTransport._BaseGetClientTlsPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.GetClientTlsPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: client_tls_policy.GetClientTlsPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> client_tls_policy.ClientTlsPolicy: + r"""Call the get client tls policy method over HTTP. + + Args: + request (~.client_tls_policy.GetClientTlsPolicyRequest): + The request object. Request used by the + GetClientTlsPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.client_tls_policy.ClientTlsPolicy: + ClientTlsPolicy is a resource that + specifies how a client should + authenticate connections to backends of + a service. This resource itself does not + affect configuration unless it is + attached to a backend service resource. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseGetClientTlsPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_client_tls_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseGetClientTlsPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseGetClientTlsPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.GetClientTlsPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetClientTlsPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._GetClientTlsPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = client_tls_policy.ClientTlsPolicy() + pb_resp = client_tls_policy.ClientTlsPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_client_tls_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_client_tls_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = client_tls_policy.ClientTlsPolicy.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.get_client_tls_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetClientTlsPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetGatewaySecurityPolicy( + _BaseNetworkSecurityRestTransport._BaseGetGatewaySecurityPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.GetGatewaySecurityPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
gateway_security_policy.GetGatewaySecurityPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gateway_security_policy.GatewaySecurityPolicy: + r"""Call the get gateway security + policy method over HTTP. + + Args: + request (~.gateway_security_policy.GetGatewaySecurityPolicyRequest): + The request object. Request used by the + GetGatewaySecurityPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gateway_security_policy.GatewaySecurityPolicy: + The GatewaySecurityPolicy resource + contains a collection of + GatewaySecurityPolicyRules and + associated metadata. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseGetGatewaySecurityPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_gateway_security_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseGetGatewaySecurityPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseGetGatewaySecurityPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.GetGatewaySecurityPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetGatewaySecurityPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._GetGatewaySecurityPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gateway_security_policy.GatewaySecurityPolicy() + pb_resp = gateway_security_policy.GatewaySecurityPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_gateway_security_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_gateway_security_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + gateway_security_policy.GatewaySecurityPolicy.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.get_gateway_security_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetGatewaySecurityPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetGatewaySecurityPolicyRule( + _BaseNetworkSecurityRestTransport._BaseGetGatewaySecurityPolicyRule, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.GetGatewaySecurityPolicyRule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gateway_security_policy_rule.GatewaySecurityPolicyRule: + r"""Call the get gateway security + policy rule method over HTTP. + + Args: + request (~.gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest): + The request object. Request used by the + GetGatewaySecurityPolicyRule method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gateway_security_policy_rule.GatewaySecurityPolicyRule: + The GatewaySecurityPolicyRule + resource is in a nested collection + within a GatewaySecurityPolicy and + represents a traffic matching condition + and associated action to perform. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseGetGatewaySecurityPolicyRule._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_gateway_security_policy_rule( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseGetGatewaySecurityPolicyRule._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseGetGatewaySecurityPolicyRule._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.GetGatewaySecurityPolicyRule", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetGatewaySecurityPolicyRule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._GetGatewaySecurityPolicyRule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gateway_security_policy_rule.GatewaySecurityPolicyRule() + pb_resp = gateway_security_policy_rule.GatewaySecurityPolicyRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_gateway_security_policy_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_gateway_security_policy_rule_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + gateway_security_policy_rule.GatewaySecurityPolicyRule.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.get_gateway_security_policy_rule", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetGatewaySecurityPolicyRule", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetServerTlsPolicy( + _BaseNetworkSecurityRestTransport._BaseGetServerTlsPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.GetServerTlsPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, 
uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: server_tls_policy.GetServerTlsPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> server_tls_policy.ServerTlsPolicy: + r"""Call the get server tls policy method over HTTP. + + Args: + request (~.server_tls_policy.GetServerTlsPolicyRequest): + The request object. Request used by the + GetServerTlsPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.server_tls_policy.ServerTlsPolicy: + ServerTlsPolicy is a resource that specifies how a + server should authenticate incoming requests. This + resource itself does not affect configuration unless it + is attached to a target HTTPS proxy or endpoint config + selector resource. + + ServerTlsPolicy in the form accepted by Application Load + Balancers can be attached only to TargetHttpsProxy with + an ``EXTERNAL``, ``EXTERNAL_MANAGED`` or + ``INTERNAL_MANAGED`` load balancing scheme. Traffic + Director compatible ServerTlsPolicies can be attached to + EndpointPolicy and TargetHttpsProxy with Traffic + Director ``INTERNAL_SELF_MANAGED`` load balancing + scheme. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseGetServerTlsPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_server_tls_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseGetServerTlsPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseGetServerTlsPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.GetServerTlsPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetServerTlsPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._GetServerTlsPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
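+            # Interceptor note: after parsing, the ServerTlsPolicy is passed through the
+            # post_get_server_tls_policy / post_get_server_tls_policy_with_metadata hooks,
+            # so a custom interceptor can observe or adjust it before it reaches the
+            # caller. A minimal sketch (assumes the NetworkSecurityRestInterceptor base
+            # class and the default constructor arguments of this transport):
+            #
+            #     class AuditingInterceptor(NetworkSecurityRestInterceptor):
+            #         def post_get_server_tls_policy(self, response):
+            #             # e.g. record response.name for auditing, then pass it through
+            #             return response
+            #
+            #     transport = NetworkSecurityRestTransport(interceptor=AuditingInterceptor())
+            #     client = NetworkSecurityClient(transport=transport)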
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = server_tls_policy.ServerTlsPolicy() + pb_resp = server_tls_policy.ServerTlsPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_server_tls_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_server_tls_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = server_tls_policy.ServerTlsPolicy.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.get_server_tls_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetServerTlsPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetTlsInspectionPolicy( + _BaseNetworkSecurityRestTransport._BaseGetTlsInspectionPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.GetTlsInspectionPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: tls_inspection_policy.GetTlsInspectionPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tls_inspection_policy.TlsInspectionPolicy: + r"""Call the get tls inspection policy method over HTTP. + + Args: + request (~.tls_inspection_policy.GetTlsInspectionPolicyRequest): + The request object. Request used by the + GetTlsInspectionPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.tls_inspection_policy.TlsInspectionPolicy: + The TlsInspectionPolicy resource + contains references to CA pools in + Certificate Authority Service and + associated metadata. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseGetTlsInspectionPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_tls_inspection_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseGetTlsInspectionPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseGetTlsInspectionPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.GetTlsInspectionPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetTlsInspectionPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._GetTlsInspectionPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tls_inspection_policy.TlsInspectionPolicy() + pb_resp = tls_inspection_policy.TlsInspectionPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_tls_inspection_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_tls_inspection_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + tls_inspection_policy.TlsInspectionPolicy.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.get_tls_inspection_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetTlsInspectionPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetUrlList( + _BaseNetworkSecurityRestTransport._BaseGetUrlList, NetworkSecurityRestStub + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.GetUrlList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + 
def __call__( + self, + request: url_list.GetUrlListRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> url_list.UrlList: + r"""Call the get url list method over HTTP. + + Args: + request (~.url_list.GetUrlListRequest): + The request object. Request used by the GetUrlList + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.url_list.UrlList: + UrlList proto helps users to set + reusable, independently manageable lists + of hosts, host patterns, URLs, URL + patterns. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseGetUrlList._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_url_list(request, metadata) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseGetUrlList._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseGetUrlList._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.GetUrlList", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetUrlList", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._GetUrlList._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
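+            # NOTE (added for clarity; not generated): core_exceptions.from_http_response
+            # builds the GoogleAPICallError subclass that matches the status code and JSON
+            # error body (for example, NotFound for HTTP 404), and the check below raises it.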
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = url_list.UrlList() + pb_resp = url_list.UrlList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_url_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_url_list_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = url_list.UrlList.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.get_url_list", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "GetUrlList", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListAuthorizationPolicies( + _BaseNetworkSecurityRestTransport._BaseListAuthorizationPolicies, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.ListAuthorizationPolicies") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: authorization_policy.ListAuthorizationPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> authorization_policy.ListAuthorizationPoliciesResponse: + r"""Call the list authorization + policies method over HTTP. + + Args: + request (~.authorization_policy.ListAuthorizationPoliciesRequest): + The request object. Request used with the + ListAuthorizationPolicies method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.authorization_policy.ListAuthorizationPoliciesResponse: + Response returned by the + ListAuthorizationPolicies method. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseListAuthorizationPolicies._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_authorization_policies( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseListAuthorizationPolicies._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseListAuthorizationPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.ListAuthorizationPolicies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListAuthorizationPolicies", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._ListAuthorizationPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = authorization_policy.ListAuthorizationPoliciesResponse() + pb_resp = authorization_policy.ListAuthorizationPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_authorization_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_authorization_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + authorization_policy.ListAuthorizationPoliciesResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.list_authorization_policies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListAuthorizationPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListAuthzPolicies( + _BaseNetworkSecurityRestTransport._BaseListAuthzPolicies, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.ListAuthzPolicies") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, 
+ params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: authz_policy.ListAuthzPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> authz_policy.ListAuthzPoliciesResponse: + r"""Call the list authz policies method over HTTP. + + Args: + request (~.authz_policy.ListAuthzPoliciesRequest): + The request object. Message for requesting list of ``AuthzPolicy`` + resources. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.authz_policy.ListAuthzPoliciesResponse: + Message for response to listing ``AuthzPolicy`` + resources. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseListAuthzPolicies._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_authz_policies( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseListAuthzPolicies._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseListAuthzPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.ListAuthzPolicies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListAuthzPolicies", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._ListAuthzPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = authz_policy.ListAuthzPoliciesResponse() + pb_resp = authz_policy.ListAuthzPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_authz_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_authz_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = authz_policy.ListAuthzPoliciesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.list_authz_policies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListAuthzPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListBackendAuthenticationConfigs( + _BaseNetworkSecurityRestTransport._BaseListBackendAuthenticationConfigs, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.ListBackendAuthenticationConfigs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: backend_authentication_config.ListBackendAuthenticationConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backend_authentication_config.ListBackendAuthenticationConfigsResponse: + r"""Call the list backend + authentication configs method over HTTP. + + Args: + request (~.backend_authentication_config.ListBackendAuthenticationConfigsRequest): + The request object. Request used by the + ListBackendAuthenticationConfigs method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backend_authentication_config.ListBackendAuthenticationConfigsResponse: + Response returned by the + ListBackendAuthenticationConfigs method. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseListBackendAuthenticationConfigs._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_list_backend_authentication_configs( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseListBackendAuthenticationConfigs._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseListBackendAuthenticationConfigs._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.ListBackendAuthenticationConfigs", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListBackendAuthenticationConfigs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._ListBackendAuthenticationConfigs._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() + ) + pb_resp = backend_authentication_config.ListBackendAuthenticationConfigsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_backend_authentication_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_backend_authentication_configs_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = backend_authentication_config.ListBackendAuthenticationConfigsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.list_backend_authentication_configs", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListBackendAuthenticationConfigs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListClientTlsPolicies( + _BaseNetworkSecurityRestTransport._BaseListClientTlsPolicies, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.ListClientTlsPolicies") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + 
headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: client_tls_policy.ListClientTlsPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> client_tls_policy.ListClientTlsPoliciesResponse: + r"""Call the list client tls policies method over HTTP. + + Args: + request (~.client_tls_policy.ListClientTlsPoliciesRequest): + The request object. Request used by the + ListClientTlsPolicies method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.client_tls_policy.ListClientTlsPoliciesResponse: + Response returned by the + ListClientTlsPolicies method. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseListClientTlsPolicies._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_client_tls_policies( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseListClientTlsPolicies._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseListClientTlsPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.ListClientTlsPolicies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListClientTlsPolicies", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._ListClientTlsPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
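+            # NOTE (added for clarity; not generated): on success, the JSON body is parsed
+            # below with ignore_unknown_fields=True, so response fields added to the API
+            # after this client was generated are ignored instead of raising parse errors.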
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = client_tls_policy.ListClientTlsPoliciesResponse() + pb_resp = client_tls_policy.ListClientTlsPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_client_tls_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_client_tls_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + client_tls_policy.ListClientTlsPoliciesResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.list_client_tls_policies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListClientTlsPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListGatewaySecurityPolicies( + _BaseNetworkSecurityRestTransport._BaseListGatewaySecurityPolicies, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.ListGatewaySecurityPolicies") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: gateway_security_policy.ListGatewaySecurityPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gateway_security_policy.ListGatewaySecurityPoliciesResponse: + r"""Call the list gateway security + policies method over HTTP. + + Args: + request (~.gateway_security_policy.ListGatewaySecurityPoliciesRequest): + The request object. Request used with the + ListGatewaySecurityPolicies method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gateway_security_policy.ListGatewaySecurityPoliciesResponse: + Response returned by the + ListGatewaySecurityPolicies method. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseListGatewaySecurityPolicies._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_gateway_security_policies( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseListGatewaySecurityPolicies._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseListGatewaySecurityPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.ListGatewaySecurityPolicies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListGatewaySecurityPolicies", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._ListGatewaySecurityPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gateway_security_policy.ListGatewaySecurityPoliciesResponse() + pb_resp = gateway_security_policy.ListGatewaySecurityPoliciesResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_gateway_security_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_gateway_security_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gateway_security_policy.ListGatewaySecurityPoliciesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.list_gateway_security_policies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListGatewaySecurityPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListGatewaySecurityPolicyRules( + _BaseNetworkSecurityRestTransport._BaseListGatewaySecurityPolicyRules, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.ListGatewaySecurityPolicyRules") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, 
method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse: + r"""Call the list gateway security + policy rules method over HTTP. + + Args: + request (~.gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest): + The request object. Request used with the + ListGatewaySecurityPolicyRules method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse: + Response returned by the + ListGatewaySecurityPolicyRules method. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseListGatewaySecurityPolicyRules._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_list_gateway_security_policy_rules( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseListGatewaySecurityPolicyRules._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseListGatewaySecurityPolicyRules._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.ListGatewaySecurityPolicyRules", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListGatewaySecurityPolicyRules", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._ListGatewaySecurityPolicyRules._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + pb_resp = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse.pb( + resp + ) + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_gateway_security_policy_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_gateway_security_policy_rules_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.list_gateway_security_policy_rules", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListGatewaySecurityPolicyRules", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListServerTlsPolicies( + _BaseNetworkSecurityRestTransport._BaseListServerTlsPolicies, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.ListServerTlsPolicies") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: server_tls_policy.ListServerTlsPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> server_tls_policy.ListServerTlsPoliciesResponse: + r"""Call the list server tls policies method over HTTP. + + Args: + request (~.server_tls_policy.ListServerTlsPoliciesRequest): + The request object. Request used by the + ListServerTlsPolicies method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.server_tls_policy.ListServerTlsPoliciesResponse: + Response returned by the + ListServerTlsPolicies method. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseListServerTlsPolicies._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_server_tls_policies( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseListServerTlsPolicies._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseListServerTlsPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.ListServerTlsPolicies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListServerTlsPolicies", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._ListServerTlsPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = server_tls_policy.ListServerTlsPoliciesResponse() + pb_resp = server_tls_policy.ListServerTlsPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_server_tls_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_server_tls_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + server_tls_policy.ListServerTlsPoliciesResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.list_server_tls_policies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListServerTlsPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListTlsInspectionPolicies( + _BaseNetworkSecurityRestTransport._BaseListTlsInspectionPolicies, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.ListTlsInspectionPolicies") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: tls_inspection_policy.ListTlsInspectionPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tls_inspection_policy.ListTlsInspectionPoliciesResponse: + r"""Call the list tls inspection + policies method over HTTP. + + Args: + request (~.tls_inspection_policy.ListTlsInspectionPoliciesRequest): + The request object. Request used with the + ListTlsInspectionPolicies method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.tls_inspection_policy.ListTlsInspectionPoliciesResponse: + Response returned by the + ListTlsInspectionPolicies method. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseListTlsInspectionPolicies._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_tls_inspection_policies( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseListTlsInspectionPolicies._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseListTlsInspectionPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.ListTlsInspectionPolicies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListTlsInspectionPolicies", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._ListTlsInspectionPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
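+            # NOTE (added for clarity; not generated): the post_list_tls_inspection_policies
+            # and post_*_with_metadata calls below are hooks on the configurable REST
+            # interceptor, which lets applications inspect or rewrite the deserialized
+            # response (and read the response headers) before it is returned to the client.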
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tls_inspection_policy.ListTlsInspectionPoliciesResponse() + pb_resp = tls_inspection_policy.ListTlsInspectionPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_tls_inspection_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tls_inspection_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + tls_inspection_policy.ListTlsInspectionPoliciesResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.list_tls_inspection_policies", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListTlsInspectionPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListUrlLists( + _BaseNetworkSecurityRestTransport._BaseListUrlLists, NetworkSecurityRestStub + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.ListUrlLists") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: url_list.ListUrlListsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> url_list.ListUrlListsResponse: + r"""Call the list url lists method over HTTP. + + Args: + request (~.url_list.ListUrlListsRequest): + The request object. Request used by the ListUrlList + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.url_list.ListUrlListsResponse: + Response returned by the ListUrlLists + method. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseListUrlLists._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_url_lists(request, metadata) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseListUrlLists._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseListUrlLists._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.ListUrlLists", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListUrlLists", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._ListUrlLists._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = url_list.ListUrlListsResponse() + pb_resp = url_list.ListUrlListsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_url_lists(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_url_lists_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = url_list.ListUrlListsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.list_url_lists", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "ListUrlLists", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateAuthorizationPolicy( + _BaseNetworkSecurityRestTransport._BaseUpdateAuthorizationPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.UpdateAuthorizationPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_authorization_policy.UpdateAuthorizationPolicyRequest, + *, + retry: 
OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update authorization + policy method over HTTP. + + Args: + request (~.gcn_authorization_policy.UpdateAuthorizationPolicyRequest): + The request object. Request used by the + UpdateAuthorizationPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseUpdateAuthorizationPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_authorization_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseUpdateAuthorizationPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseUpdateAuthorizationPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseUpdateAuthorizationPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.UpdateAuthorizationPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "UpdateAuthorizationPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], }, - ], - "google.longrunning.Operations.DeleteOperation": [ - { - "method": "delete", - "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + ) + + # Send the request + response = ( + NetworkSecurityRestTransport._UpdateAuthorizationPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_authorization_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_authorization_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.update_authorization_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "UpdateAuthorizationPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, }, - { - "method": "delete", - "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + ) + return resp + + class _UpdateAuthzPolicy( + _BaseNetworkSecurityRestTransport._BaseUpdateAuthzPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.UpdateAuthzPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_authz_policy.UpdateAuthzPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update authz policy method over HTTP. + + Args: + request (~.gcn_authz_policy.UpdateAuthzPolicyRequest): + The request object. Message for updating an ``AuthzPolicy`` resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseUpdateAuthzPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_authz_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseUpdateAuthzPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseUpdateAuthzPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseUpdateAuthzPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.UpdateAuthzPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "UpdateAuthzPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], }, - ], - "google.longrunning.Operations.GetOperation": [ - { - "method": "get", - "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + ) + + # Send the request + response = NetworkSecurityRestTransport._UpdateAuthzPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
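+            # NOTE (added for clarity; not generated): UpdateAuthzPolicy is a long-running
+            # operation, so the raw operations_pb2.Operation parsed below is wrapped into an
+            # operation future at the client layer, where callers poll it to completion.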
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_authz_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_authz_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.update_authz_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "UpdateAuthzPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, }, - { - "method": "get", - "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + ) + return resp + + class _UpdateBackendAuthenticationConfig( + _BaseNetworkSecurityRestTransport._BaseUpdateBackendAuthenticationConfig, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash( + "NetworkSecurityRestTransport.UpdateBackendAuthenticationConfig" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update backend + authentication config method over HTTP. + + Args: + request (~.gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest): + The request object. Request used by + UpdateBackendAuthenticationConfig + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseUpdateBackendAuthenticationConfig._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_update_backend_authentication_config( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseUpdateBackendAuthenticationConfig._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseUpdateBackendAuthenticationConfig._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseUpdateBackendAuthenticationConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.UpdateBackendAuthenticationConfig", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "UpdateBackendAuthenticationConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], }, - ], - "google.longrunning.Operations.ListOperations": [ - { - "method": "get", - "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + ) + + # Send the request + response = NetworkSecurityRestTransport._UpdateBackendAuthenticationConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_backend_authentication_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_backend_authentication_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.update_backend_authentication_config", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "UpdateBackendAuthenticationConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, }, - { - "method": "get", - "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + ) + return resp + + class _UpdateClientTlsPolicy( + _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.UpdateClientTlsPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_client_tls_policy.UpdateClientTlsPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update client tls policy method over HTTP. + + Args: + request (~.gcn_client_tls_policy.UpdateClientTlsPolicyRequest): + The request object. Request used by UpdateClientTlsPolicy + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_client_tls_policy( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.UpdateClientTlsPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "UpdateClientTlsPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], }, - ], - } + ) - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1alpha1", + # Send the request + response = ( + NetworkSecurityRestTransport._UpdateClientTlsPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) ) - self._operations_client = operations_v1.AbstractOperationsClient( - transport=rest_transport - ) + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) - # Return the client from cache. 
- return self._operations_client + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - class _CreateClientTlsPolicy( - _BaseNetworkSecurityRestTransport._BaseCreateClientTlsPolicy, + resp = self._interceptor.post_update_client_tls_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_client_tls_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.update_client_tls_policy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "UpdateClientTlsPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateGatewaySecurityPolicy( + _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicy, NetworkSecurityRestStub, ): def __hash__(self): - return hash("NetworkSecurityRestTransport.CreateClientTlsPolicy") + return hash("NetworkSecurityRestTransport.UpdateGatewaySecurityPolicy") @staticmethod def _get_response( @@ -786,51 +9384,52 @@ def _get_response( def __call__( self, - request: gcn_client_tls_policy.CreateClientTlsPolicyRequest, + request: gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the create client tls policy method over HTTP. - - Args: - request (~.gcn_client_tls_policy.CreateClientTlsPolicyRequest): - The request object. Request used by the - CreateClientTlsPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + r"""Call the update gateway security + policy method over HTTP. + + Args: + request (~.gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest): + The request object. Request used by the + UpdateGatewaySecurityPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options = ( - _BaseNetworkSecurityRestTransport._BaseCreateClientTlsPolicy._get_http_options() + _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicy._get_http_options() ) - request, metadata = self._interceptor.pre_create_client_tls_policy( + request, metadata = self._interceptor.pre_update_gateway_security_policy( request, metadata ) - transcoded_request = _BaseNetworkSecurityRestTransport._BaseCreateClientTlsPolicy._get_transcoded_request( + transcoded_request = _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicy._get_transcoded_request( http_options, request ) - body = _BaseNetworkSecurityRestTransport._BaseCreateClientTlsPolicy._get_request_body_json( + body = _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicy._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = _BaseNetworkSecurityRestTransport._BaseCreateClientTlsPolicy._get_query_params_json( + query_params = _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicy._get_query_params_json( transcoded_request ) @@ -852,10 +9451,10 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.CreateClientTlsPolicy", + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.UpdateGatewaySecurityPolicy", extra={ "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "rpcName": "CreateClientTlsPolicy", + "rpcName": "UpdateGatewaySecurityPolicy", "httpRequest": http_request, "metadata": http_request["headers"], }, @@ -863,7 +9462,7 @@ def __call__( # Send the request response = ( - NetworkSecurityRestTransport._CreateClientTlsPolicy._get_response( + NetworkSecurityRestTransport._UpdateGatewaySecurityPolicy._get_response( self._host, metadata, query_params, @@ -883,9 +9482,12 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_client_tls_policy(resp) + resp = self._interceptor.post_update_gateway_security_policy(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_client_tls_policy_with_metadata( + ( + resp, + _, + ) = self._interceptor.post_update_gateway_security_policy_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -901,22 +9503,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.create_client_tls_policy", + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.update_gateway_security_policy", extra={ "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "rpcName": "CreateClientTlsPolicy", + "rpcName": "UpdateGatewaySecurityPolicy", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteClientTlsPolicy( - _BaseNetworkSecurityRestTransport._BaseDeleteClientTlsPolicy, + class _UpdateGatewaySecurityPolicyRule( + _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicyRule, NetworkSecurityRestStub, ): def __hash__(self): - return hash("NetworkSecurityRestTransport.DeleteClientTlsPolicy") + return hash("NetworkSecurityRestTransport.UpdateGatewaySecurityPolicyRule") @staticmethod def _get_response( @@ -937,23 +9539,186 @@ def _get_response( timeout=timeout, headers=headers, 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: client_tls_policy.DeleteClientTlsPolicyRequest, + request: gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the delete client tls policy method over HTTP. + r"""Call the update gateway security + policy rule method over HTTP. + + Args: + request (~.gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest): + The request object. Request used by the + UpdateGatewaySecurityPolicyRule method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicyRule._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_update_gateway_security_policy_rule( + request, metadata + ) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicyRule._get_transcoded_request( + http_options, request + ) + + body = _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicyRule._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicyRule._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.UpdateGatewaySecurityPolicyRule", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "UpdateGatewaySecurityPolicyRule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = NetworkSecurityRestTransport._UpdateGatewaySecurityPolicyRule._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_gateway_security_policy_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_gateway_security_policy_rule_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.update_gateway_security_policy_rule", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "rpcName": "UpdateGatewaySecurityPolicyRule", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateServerTlsPolicy( + _BaseNetworkSecurityRestTransport._BaseUpdateServerTlsPolicy, + NetworkSecurityRestStub, + ): + def __hash__(self): + return hash("NetworkSecurityRestTransport.UpdateServerTlsPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_server_tls_policy.UpdateServerTlsPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update server tls policy method over HTTP. Args: - request (~.client_tls_policy.DeleteClientTlsPolicyRequest): - The request object. Request used by the - DeleteClientTlsPolicy method. + request (~.gcn_server_tls_policy.UpdateServerTlsPolicyRequest): + The request object. Request used by UpdateServerTlsPolicy + method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -971,18 +9736,22 @@ def __call__( """ http_options = ( - _BaseNetworkSecurityRestTransport._BaseDeleteClientTlsPolicy._get_http_options() + _BaseNetworkSecurityRestTransport._BaseUpdateServerTlsPolicy._get_http_options() ) - request, metadata = self._interceptor.pre_delete_client_tls_policy( + request, metadata = self._interceptor.pre_update_server_tls_policy( request, metadata ) - transcoded_request = _BaseNetworkSecurityRestTransport._BaseDeleteClientTlsPolicy._get_transcoded_request( + transcoded_request = _BaseNetworkSecurityRestTransport._BaseUpdateServerTlsPolicy._get_transcoded_request( http_options, request ) + body = _BaseNetworkSecurityRestTransport._BaseUpdateServerTlsPolicy._get_request_body_json( + transcoded_request + ) + # Jsonify the query params - query_params = _BaseNetworkSecurityRestTransport._BaseDeleteClientTlsPolicy._get_query_params_json( + query_params = _BaseNetworkSecurityRestTransport._BaseUpdateServerTlsPolicy._get_query_params_json( transcoded_request ) @@ -1004,10 +9773,10 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.DeleteClientTlsPolicy", + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.UpdateServerTlsPolicy", extra={ "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "rpcName": "DeleteClientTlsPolicy", + "rpcName": "UpdateServerTlsPolicy", "httpRequest": http_request, "metadata": http_request["headers"], }, @@ -1015,13 +9784,14 @@ def __call__( # Send the request response = ( - NetworkSecurityRestTransport._DeleteClientTlsPolicy._get_response( + NetworkSecurityRestTransport._UpdateServerTlsPolicy._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, + body, ) ) @@ -1034,9 +9804,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_client_tls_policy(resp) + resp = self._interceptor.post_update_server_tls_policy(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_client_tls_policy_with_metadata( + resp, _ = self._interceptor.post_update_server_tls_policy_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -1052,22 +9822,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.delete_client_tls_policy", + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.update_server_tls_policy", extra={ "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "rpcName": "DeleteClientTlsPolicy", + "rpcName": "UpdateServerTlsPolicy", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetClientTlsPolicy( - _BaseNetworkSecurityRestTransport._BaseGetClientTlsPolicy, + class _UpdateTlsInspectionPolicy( + _BaseNetworkSecurityRestTransport._BaseUpdateTlsInspectionPolicy, NetworkSecurityRestStub, ): def __hash__(self): - return hash("NetworkSecurityRestTransport.GetClientTlsPolicy") + return hash("NetworkSecurityRestTransport.UpdateTlsInspectionPolicy") @staticmethod def _get_response( @@ -1088,55 +9858,58 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - 
request: client_tls_policy.GetClientTlsPolicyRequest, + request: gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> client_tls_policy.ClientTlsPolicy: - r"""Call the get client tls policy method over HTTP. - - Args: - request (~.client_tls_policy.GetClientTlsPolicyRequest): - The request object. Request used by the - GetClientTlsPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.client_tls_policy.ClientTlsPolicy: - ClientTlsPolicy is a resource that - specifies how a client should - authenticate connections to backends of - a service. This resource itself does not - affect configuration unless it is - attached to a backend service resource. + ) -> operations_pb2.Operation: + r"""Call the update tls inspection + policy method over HTTP. + + Args: + request (~.gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest): + The request object. Request used by the + UpdateTlsInspectionPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options = ( - _BaseNetworkSecurityRestTransport._BaseGetClientTlsPolicy._get_http_options() + _BaseNetworkSecurityRestTransport._BaseUpdateTlsInspectionPolicy._get_http_options() ) - request, metadata = self._interceptor.pre_get_client_tls_policy( + request, metadata = self._interceptor.pre_update_tls_inspection_policy( request, metadata ) - transcoded_request = _BaseNetworkSecurityRestTransport._BaseGetClientTlsPolicy._get_transcoded_request( + transcoded_request = _BaseNetworkSecurityRestTransport._BaseUpdateTlsInspectionPolicy._get_transcoded_request( http_options, request ) + body = _BaseNetworkSecurityRestTransport._BaseUpdateTlsInspectionPolicy._get_request_body_json( + transcoded_request + ) + # Jsonify the query params - query_params = _BaseNetworkSecurityRestTransport._BaseGetClientTlsPolicy._get_query_params_json( + query_params = _BaseNetworkSecurityRestTransport._BaseUpdateTlsInspectionPolicy._get_query_params_json( transcoded_request ) @@ -1148,7 +9921,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -1158,23 +9931,26 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.GetClientTlsPolicy", + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.UpdateTlsInspectionPolicy", extra={ "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "rpcName": "GetClientTlsPolicy", + "rpcName": "UpdateTlsInspectionPolicy", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = NetworkSecurityRestTransport._GetClientTlsPolicy._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, + response = ( + NetworkSecurityRestTransport._UpdateTlsInspectionPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1183,23 +9959,19 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = client_tls_policy.ClientTlsPolicy() - pb_resp = client_tls_policy.ClientTlsPolicy.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_client_tls_policy(resp) + resp = self._interceptor.post_update_tls_inspection_policy(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_client_tls_policy_with_metadata( + resp, _ = self._interceptor.post_update_tls_inspection_policy_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = client_tls_policy.ClientTlsPolicy.to_json( - response - ) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -1208,22 +9980,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.get_client_tls_policy", + "Received response for 
google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.update_tls_inspection_policy", extra={ "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "rpcName": "GetClientTlsPolicy", + "rpcName": "UpdateTlsInspectionPolicy", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListClientTlsPolicies( - _BaseNetworkSecurityRestTransport._BaseListClientTlsPolicies, - NetworkSecurityRestStub, + class _UpdateUrlList( + _BaseNetworkSecurityRestTransport._BaseUpdateUrlList, NetworkSecurityRestStub ): def __hash__(self): - return hash("NetworkSecurityRestTransport.ListClientTlsPolicies") + return hash("NetworkSecurityRestTransport.UpdateUrlList") @staticmethod def _get_response( @@ -1244,23 +10015,23 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: client_tls_policy.ListClientTlsPoliciesRequest, + request: gcn_url_list.UpdateUrlListRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> client_tls_policy.ListClientTlsPoliciesResponse: - r"""Call the list client tls policies method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update url list method over HTTP. Args: - request (~.client_tls_policy.ListClientTlsPoliciesRequest): - The request object. Request used by the - ListClientTlsPolicies method. + request (~.gcn_url_list.UpdateUrlListRequest): + The request object. Request used by UpdateUrlList method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1270,25 +10041,28 @@ def __call__( be of type `bytes`. Returns: - ~.client_tls_policy.ListClientTlsPoliciesResponse: - Response returned by the - ListClientTlsPolicies method. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options = ( - _BaseNetworkSecurityRestTransport._BaseListClientTlsPolicies._get_http_options() + _BaseNetworkSecurityRestTransport._BaseUpdateUrlList._get_http_options() ) - request, metadata = self._interceptor.pre_list_client_tls_policies( - request, metadata - ) - transcoded_request = _BaseNetworkSecurityRestTransport._BaseListClientTlsPolicies._get_transcoded_request( + request, metadata = self._interceptor.pre_update_url_list(request, metadata) + transcoded_request = _BaseNetworkSecurityRestTransport._BaseUpdateUrlList._get_transcoded_request( http_options, request ) + body = _BaseNetworkSecurityRestTransport._BaseUpdateUrlList._get_request_body_json( + transcoded_request + ) + # Jsonify the query params - query_params = _BaseNetworkSecurityRestTransport._BaseListClientTlsPolicies._get_query_params_json( + query_params = _BaseNetworkSecurityRestTransport._BaseUpdateUrlList._get_query_params_json( transcoded_request ) @@ -1300,7 +10074,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -1310,25 +10084,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.ListClientTlsPolicies", + f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.UpdateUrlList", extra={ "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "rpcName": "ListClientTlsPolicies", + "rpcName": "UpdateUrlList", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ( - NetworkSecurityRestTransport._ListClientTlsPolicies._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) + response = NetworkSecurityRestTransport._UpdateUrlList._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1337,25 +10110,19 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = client_tls_policy.ListClientTlsPoliciesResponse() - pb_resp = client_tls_policy.ListClientTlsPoliciesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_client_tls_policies(resp) + resp = self._interceptor.post_update_url_list(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_client_tls_policies_with_metadata( + resp, _ = self._interceptor.post_update_url_list_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = ( - client_tls_policy.ListClientTlsPoliciesResponse.to_json( - response - ) - ) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -1364,202 +10131,320 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.list_client_tls_policies", + "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.update_url_list", extra={ 
"serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "rpcName": "ListClientTlsPolicies", + "rpcName": "UpdateUrlList", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _UpdateClientTlsPolicy( - _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy, - NetworkSecurityRestStub, - ): - def __hash__(self): - return hash("NetworkSecurityRestTransport.UpdateClientTlsPolicy") + @property + def create_authorization_policy( + self, + ) -> Callable[ + [gcn_authorization_policy.CreateAuthorizationPolicyRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAuthorizationPolicy(self._session, self._host, self._interceptor) # type: ignore - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response + @property + def create_authz_policy( + self, + ) -> Callable[ + [gcn_authz_policy.CreateAuthzPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAuthzPolicy(self._session, self._host, self._interceptor) # type: ignore - def __call__( - self, - request: gcn_client_tls_policy.UpdateClientTlsPolicyRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the update client tls policy method over HTTP. + @property + def create_backend_authentication_config( + self, + ) -> Callable[ + [gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackendAuthenticationConfig(self._session, self._host, self._interceptor) # type: ignore - Args: - request (~.gcn_client_tls_policy.UpdateClientTlsPolicyRequest): - The request object. Request used by UpdateClientTlsPolicy - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + @property + def create_client_tls_policy( + self, + ) -> Callable[ + [gcn_client_tls_policy.CreateClientTlsPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateClientTlsPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_gateway_security_policy( + self, + ) -> Callable[ + [gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGatewaySecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_gateway_security_policy_rule( + self, + ) -> Callable[ + [gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGatewaySecurityPolicyRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_server_tls_policy( + self, + ) -> Callable[ + [gcn_server_tls_policy.CreateServerTlsPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateServerTlsPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_tls_inspection_policy( + self, + ) -> Callable[ + [gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTlsInspectionPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_url_list( + self, + ) -> Callable[[gcn_url_list.CreateUrlListRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateUrlList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_authorization_policy( + self, + ) -> Callable[ + [authorization_policy.DeleteAuthorizationPolicyRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAuthorizationPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_authz_policy( + self, + ) -> Callable[[authz_policy.DeleteAuthzPolicyRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAuthzPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backend_authentication_config( + self, + ) -> Callable[ + [backend_authentication_config.DeleteBackendAuthenticationConfigRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBackendAuthenticationConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_client_tls_policy( + self, + ) -> Callable[ + [client_tls_policy.DeleteClientTlsPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteClientTlsPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_gateway_security_policy( + self, + ) -> Callable[ + [gateway_security_policy.DeleteGatewaySecurityPolicyRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGatewaySecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_gateway_security_policy_rule( + self, + ) -> Callable[ + [gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGatewaySecurityPolicyRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_server_tls_policy( + self, + ) -> Callable[ + [server_tls_policy.DeleteServerTlsPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteServerTlsPolicy(self._session, self._host, self._interceptor) # type: ignore - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + @property + def delete_tls_inspection_policy( + self, + ) -> Callable[ + [tls_inspection_policy.DeleteTlsInspectionPolicyRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTlsInspectionPolicy(self._session, self._host, self._interceptor) # type: ignore - """ + @property + def delete_url_list( + self, + ) -> Callable[[url_list.DeleteUrlListRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteUrlList(self._session, self._host, self._interceptor) # type: ignore - http_options = ( - _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy._get_http_options() - ) + @property + def get_authorization_policy( + self, + ) -> Callable[ + [authorization_policy.GetAuthorizationPolicyRequest], + authorization_policy.AuthorizationPolicy, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetAuthorizationPolicy(self._session, self._host, self._interceptor) # type: ignore - request, metadata = self._interceptor.pre_update_client_tls_policy( - request, metadata - ) - transcoded_request = _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy._get_transcoded_request( - http_options, request - ) + @property + def get_authz_policy( + self, + ) -> Callable[[authz_policy.GetAuthzPolicyRequest], authz_policy.AuthzPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAuthzPolicy(self._session, self._host, self._interceptor) # type: ignore - body = _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy._get_request_body_json( - transcoded_request - ) + @property + def get_backend_authentication_config( + self, + ) -> Callable[ + [backend_authentication_config.GetBackendAuthenticationConfigRequest], + backend_authentication_config.BackendAuthenticationConfig, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackendAuthenticationConfig(self._session, self._host, self._interceptor) # type: ignore - # Jsonify the query params - query_params = _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy._get_query_params_json( - transcoded_request - ) + @property + def get_client_tls_policy( + self, + ) -> Callable[ + [client_tls_policy.GetClientTlsPolicyRequest], client_tls_policy.ClientTlsPolicy + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetClientTlsPolicy(self._session, self._host, self._interceptor) # type: ignore - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.UpdateClientTlsPolicy", - extra={ - "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "rpcName": "UpdateClientTlsPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) + @property + def get_gateway_security_policy( + self, + ) -> Callable[ + [gateway_security_policy.GetGatewaySecurityPolicyRequest], + gateway_security_policy.GatewaySecurityPolicy, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetGatewaySecurityPolicy(self._session, self._host, self._interceptor) # type: ignore - # Send the request - response = ( - NetworkSecurityRestTransport._UpdateClientTlsPolicy._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - ) + @property + def get_gateway_security_policy_rule( + self, + ) -> Callable[ + [gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest], + gateway_security_policy_rule.GatewaySecurityPolicyRule, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGatewaySecurityPolicyRule(self._session, self._host, self._interceptor) # type: ignore - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) + @property + def get_server_tls_policy( + self, + ) -> Callable[ + [server_tls_policy.GetServerTlsPolicyRequest], server_tls_policy.ServerTlsPolicy + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetServerTlsPolicy(self._session, self._host, self._interceptor) # type: ignore - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + @property + def get_tls_inspection_policy( + self, + ) -> Callable[ + [tls_inspection_policy.GetTlsInspectionPolicyRequest], + tls_inspection_policy.TlsInspectionPolicy, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTlsInspectionPolicy(self._session, self._host, self._interceptor) # type: ignore - resp = self._interceptor.post_update_client_tls_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_client_tls_policy_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.networksecurity_v1alpha1.NetworkSecurityClient.update_client_tls_policy", - extra={ - "serviceName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "rpcName": "UpdateClientTlsPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp + @property + def get_url_list(self) -> Callable[[url_list.GetUrlListRequest], url_list.UrlList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetUrlList(self._session, self._host, self._interceptor) # type: ignore @property - def create_client_tls_policy( + def list_authorization_policies( self, ) -> Callable[ - [gcn_client_tls_policy.CreateClientTlsPolicyRequest], operations_pb2.Operation + [authorization_policy.ListAuthorizationPoliciesRequest], + authorization_policy.ListAuthorizationPoliciesResponse, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateClientTlsPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._ListAuthorizationPolicies(self._session, self._host, self._interceptor) # type: ignore @property - def delete_client_tls_policy( + def list_authz_policies( self, ) -> Callable[ - [client_tls_policy.DeleteClientTlsPolicyRequest], operations_pb2.Operation + [authz_policy.ListAuthzPoliciesRequest], authz_policy.ListAuthzPoliciesResponse ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteClientTlsPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._ListAuthzPolicies(self._session, self._host, self._interceptor) # type: ignore @property - def get_client_tls_policy( + def list_backend_authentication_configs( self, ) -> Callable[ - [client_tls_policy.GetClientTlsPolicyRequest], client_tls_policy.ClientTlsPolicy + [backend_authentication_config.ListBackendAuthenticationConfigsRequest], + backend_authentication_config.ListBackendAuthenticationConfigsResponse, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetClientTlsPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._ListBackendAuthenticationConfigs(self._session, self._host, self._interceptor) # type: ignore @property def list_client_tls_policies( @@ -1572,6 +10457,90 @@ def list_client_tls_policies( # In C++ this would require a dynamic_cast return self._ListClientTlsPolicies(self._session, self._host, self._interceptor) # type: ignore + @property + def list_gateway_security_policies( + self, + ) -> Callable[ + [gateway_security_policy.ListGatewaySecurityPoliciesRequest], + gateway_security_policy.ListGatewaySecurityPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGatewaySecurityPolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_gateway_security_policy_rules( + self, + ) -> Callable[ + [gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest], + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGatewaySecurityPolicyRules(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_server_tls_policies( + self, + ) -> Callable[ + [server_tls_policy.ListServerTlsPoliciesRequest], + server_tls_policy.ListServerTlsPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListServerTlsPolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tls_inspection_policies( + self, + ) -> Callable[ + [tls_inspection_policy.ListTlsInspectionPoliciesRequest], + tls_inspection_policy.ListTlsInspectionPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTlsInspectionPolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_url_lists( + self, + ) -> Callable[[url_list.ListUrlListsRequest], url_list.ListUrlListsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListUrlLists(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_authorization_policy( + self, + ) -> Callable[ + [gcn_authorization_policy.UpdateAuthorizationPolicyRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAuthorizationPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_authz_policy( + self, + ) -> Callable[ + [gcn_authz_policy.UpdateAuthzPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAuthzPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backend_authentication_config( + self, + ) -> Callable[ + [gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackendAuthenticationConfig(self._session, self._host, self._interceptor) # type: ignore + @property def update_client_tls_policy( self, @@ -1582,6 +10551,57 @@ def update_client_tls_policy( # In C++ this would require a dynamic_cast return self._UpdateClientTlsPolicy(self._session, self._host, self._interceptor) # type: ignore + @property + def update_gateway_security_policy( + self, + ) -> Callable[ + [gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateGatewaySecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_gateway_security_policy_rule( + self, + ) -> Callable[ + [gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
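# Editorial note (not part of the generated patch): a self-contained, hedged
# analogue of the property pattern above. Each generated property returns an
# instance of a private nested stub class whose __call__ matches the declared
# Callable signature; mypy cannot prove that equivalence, hence the
# "# type: ignore" on every return. All names below are invented for illustration.
from typing import Callable, Dict


class _GetUrlList:
    """Stand-in for a nested REST stub; callable with the declared shape."""

    def __init__(self, session: str, host: str) -> None:
        self._session = session
        self._host = host

    def __call__(self, request: Dict[str, str]) -> Dict[str, str]:
        # A real stub would issue the HTTP GET and parse the response proto.
        return {"name": request["name"], "host": self._host}


class DemoRestTransport:
    def __init__(self) -> None:
        self._session = "authorized-session"
        self._host = "networksecurity.googleapis.com"

    @property
    def get_url_list(self) -> Callable[[Dict[str, str]], Dict[str, str]]:
        # The instance satisfies the Callable type at runtime; the cast only
        # silences mypy, exactly as in the generated transport.
        return _GetUrlList(self._session, self._host)  # type: ignore


transport = DemoRestTransport()
print(transport.get_url_list({"name": "projects/demo/locations/global/urlLists/allow"}))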
+ # In C++ this would require a dynamic_cast + return self._UpdateGatewaySecurityPolicyRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_server_tls_policy( + self, + ) -> Callable[ + [gcn_server_tls_policy.UpdateServerTlsPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateServerTlsPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_tls_inspection_policy( + self, + ) -> Callable[ + [gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTlsInspectionPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_url_list( + self, + ) -> Callable[[gcn_url_list.UpdateUrlListRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateUrlList(self._session, self._host, self._interceptor) # type: ignore + @property def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/rest_base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/rest_base.py index a400acdd26bc..3b50cfa7b838 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/rest_base.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/network_security/transports/rest_base.py @@ -24,10 +24,40 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import json_format +from google.cloud.network_security_v1alpha1.types import ( + authorization_policy as gcn_authorization_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + authz_policy as gcn_authz_policy, +) +from google.cloud.network_security_v1alpha1.types import backend_authentication_config +from google.cloud.network_security_v1alpha1.types import ( + backend_authentication_config as gcn_backend_authentication_config, +) from google.cloud.network_security_v1alpha1.types import ( client_tls_policy as gcn_client_tls_policy, ) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy as gcn_gateway_security_policy, +) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy_rule +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy_rule as gcn_gateway_security_policy_rule, +) +from google.cloud.network_security_v1alpha1.types import ( + server_tls_policy as gcn_server_tls_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + tls_inspection_policy as gcn_tls_inspection_policy, +) +from google.cloud.network_security_v1alpha1.types import url_list as gcn_url_list +from google.cloud.network_security_v1alpha1.types import authorization_policy +from google.cloud.network_security_v1alpha1.types 
import authz_policy from google.cloud.network_security_v1alpha1.types import client_tls_policy +from google.cloud.network_security_v1alpha1.types import server_tls_policy +from google.cloud.network_security_v1alpha1.types import tls_inspection_policy +from google.cloud.network_security_v1alpha1.types import url_list from .base import DEFAULT_CLIENT_INFO, NetworkSecurityTransport @@ -94,6 +124,187 @@ def __init__( api_audience=api_audience, ) + class _BaseCreateAuthorizationPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "authorizationPolicyId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/authorizationPolicies", + "body": "authorization_policy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_authorization_policy.CreateAuthorizationPolicyRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseCreateAuthorizationPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateAuthzPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "authzPolicyId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/authzPolicies", + "body": "authz_policy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_authz_policy.CreateAuthzPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseCreateAuthzPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateBackendAuthenticationConfig: + 
def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backendAuthenticationConfigId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/backendAuthenticationConfigs", + "body": "backend_authentication_config", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseCreateBackendAuthenticationConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateClientTlsPolicy: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -153,11 +364,13 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params - class _BaseDeleteClientTlsPolicy: + class _BaseCreateGatewaySecurityPolicy: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "gatewaySecurityPolicyId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -171,18 +384,32 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1alpha1/{name=projects/*/locations/*/clientTlsPolicies/*}", + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/gatewaySecurityPolicies", + "body": "gateway_security_policy", }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = client_tls_policy.DeleteClientTlsPolicyRequest.pb(request) + pb_request = ( + gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest.pb( + request + ) + ) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + @staticmethod def _get_query_params_json(transcoded_request): query_params = json.loads( @@ -192,7 +419,7 @@ def _get_query_params_json(transcoded_request): ) ) query_params.update( - _BaseNetworkSecurityRestTransport._BaseDeleteClientTlsPolicy._get_unset_required_fields( + _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicy._get_unset_required_fields( query_params 
) ) @@ -200,7 +427,7 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params - class _BaseGetClientTlsPolicy: + class _BaseCreateGatewaySecurityPolicyRule: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -218,18 +445,30 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1alpha1/{name=projects/*/locations/*/clientTlsPolicies/*}", + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*/gatewaySecurityPolicies/*}/rules", + "body": "gateway_security_policy_rule", }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = client_tls_policy.GetClientTlsPolicyRequest.pb(request) + pb_request = gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + @staticmethod def _get_query_params_json(transcoded_request): query_params = json.loads( @@ -239,7 +478,7 @@ def _get_query_params_json(transcoded_request): ) ) query_params.update( - _BaseNetworkSecurityRestTransport._BaseGetClientTlsPolicy._get_unset_required_fields( + _BaseNetworkSecurityRestTransport._BaseCreateGatewaySecurityPolicyRule._get_unset_required_fields( query_params ) ) @@ -247,11 +486,13 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params - class _BaseListClientTlsPolicies: + class _BaseCreateServerTlsPolicy: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "serverTlsPolicyId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -265,18 +506,28 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1alpha1/{parent=projects/*/locations/*}/clientTlsPolicies", + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/serverTlsPolicies", + "body": "server_tls_policy", }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = client_tls_policy.ListClientTlsPoliciesRequest.pb(request) + pb_request = gcn_server_tls_policy.CreateServerTlsPolicyRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + @staticmethod def _get_query_params_json(transcoded_request): query_params = json.loads( @@ -286,7 +537,7 @@ def _get_query_params_json(transcoded_request): ) ) query_params.update( - _BaseNetworkSecurityRestTransport._BaseListClientTlsPolicies._get_unset_required_fields( + _BaseNetworkSecurityRestTransport._BaseCreateServerTlsPolicy._get_unset_required_fields( query_params ) ) @@ -294,11 +545,13 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = 
"json;enum-encoding=int" return query_params - class _BaseUpdateClientTlsPolicy: + class _BaseCreateTlsInspectionPolicy: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "tlsInspectionPolicyId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -312,16 +565,18 @@ def _get_unset_required_fields(cls, message_dict): def _get_http_options(): http_options: List[Dict[str, str]] = [ { - "method": "patch", - "uri": "/v1alpha1/{client_tls_policy.name=projects/*/locations/*/clientTlsPolicies/*}", - "body": "client_tls_policy", + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/tlsInspectionPolicies", + "body": "tls_inspection_policy", }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest.pb(request) + pb_request = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -343,7 +598,1896 @@ def _get_query_params_json(transcoded_request): ) ) query_params.update( - _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy._get_unset_required_fields( + _BaseNetworkSecurityRestTransport._BaseCreateTlsInspectionPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateUrlList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "urlListId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/urlLists", + "body": "url_list", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_url_list.CreateUrlListRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseCreateUrlList._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteAuthorizationPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": 
"/v1alpha1/{name=projects/*/locations/*/authorizationPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = authorization_policy.DeleteAuthorizationPolicyRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseDeleteAuthorizationPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteAuthzPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/authzPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = authz_policy.DeleteAuthzPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseDeleteAuthzPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteBackendAuthenticationConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/backendAuthenticationConfigs/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backend_authentication_config.DeleteBackendAuthenticationConfigRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseDeleteBackendAuthenticationConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteClientTlsPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/clientTlsPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = client_tls_policy.DeleteClientTlsPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseDeleteClientTlsPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteGatewaySecurityPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/gatewaySecurityPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gateway_security_policy.DeleteGatewaySecurityPolicyRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseDeleteGatewaySecurityPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteGatewaySecurityPolicyRule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/gatewaySecurityPolicies/*/rules/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseDeleteGatewaySecurityPolicyRule._get_unset_required_fields( + 
query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteServerTlsPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/serverTlsPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = server_tls_policy.DeleteServerTlsPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseDeleteServerTlsPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteTlsInspectionPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/tlsInspectionPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = tls_inspection_policy.DeleteTlsInspectionPolicyRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseDeleteTlsInspectionPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteUrlList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/urlLists/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = url_list.DeleteUrlListRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseDeleteUrlList._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetAuthorizationPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/authorizationPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = authorization_policy.GetAuthorizationPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseGetAuthorizationPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetAuthzPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/authzPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = authz_policy.GetAuthzPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseGetAuthzPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetBackendAuthenticationConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/backendAuthenticationConfigs/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + 
backend_authentication_config.GetBackendAuthenticationConfigRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseGetBackendAuthenticationConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetClientTlsPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/clientTlsPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = client_tls_policy.GetClientTlsPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseGetClientTlsPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetGatewaySecurityPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/gatewaySecurityPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gateway_security_policy.GetGatewaySecurityPolicyRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseGetGatewaySecurityPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetGatewaySecurityPolicyRule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def 
_get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/gatewaySecurityPolicies/*/rules/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseGetGatewaySecurityPolicyRule._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetServerTlsPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/serverTlsPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = server_tls_policy.GetServerTlsPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseGetServerTlsPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetTlsInspectionPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/tlsInspectionPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = tls_inspection_policy.GetTlsInspectionPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseGetTlsInspectionPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetUrlList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
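# Editorial note (not part of the generated patch): a hedged sketch of what the
# _BaseGetUrlList helpers around this point do, using
# google.api_core.path_template.transcode with a plain dict in place of the
# GetUrlListRequest proto. The resource name is invented for illustration.
from google.api_core import path_template

http_options = [
    {
        "method": "get",
        "uri": "/v1alpha1/{name=projects/*/locations/*/urlLists/*}",
    },
]

request = {"name": "projects/demo/locations/us-central1/urlLists/allow-list"}

transcoded = path_template.transcode(http_options, **request)
print(transcoded["method"])        # get
print(transcoded["uri"])           # /v1alpha1/projects/demo/locations/us-central1/urlLists/allow-list
print(transcoded["query_params"])  # {}  (any remaining request fields would land here)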
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/urlLists/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = url_list.GetUrlListRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseGetUrlList._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListAuthorizationPolicies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/authorizationPolicies", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = authorization_policy.ListAuthorizationPoliciesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseListAuthorizationPolicies._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListAuthzPolicies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/authzPolicies", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = authz_policy.ListAuthzPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseListAuthzPolicies._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + 
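# Editorial note (not part of the generated patch): a minimal, hedged sketch of
# the _get_unset_required_fields merge used by these classes. Required query
# parameters the caller left unset are re-added with their proto3 default
# values before "$alt" is appended; the field name below comes from the
# _BaseCreateUrlList defaults earlier in this file and the values are illustrative.
REQUIRED_FIELDS_DEFAULT_VALUES = {"urlListId": ""}


def get_unset_required_fields(message_dict):
    # Keep only the required fields missing from the transcoded query params.
    return {
        k: v
        for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
        if k not in message_dict
    }


query_params = {"parent": "projects/demo/locations/us-central1"}
query_params.update(get_unset_required_fields(query_params))
query_params["$alt"] = "json;enum-encoding=int"
print(query_params)
# {'parent': 'projects/demo/locations/us-central1', 'urlListId': '', '$alt': 'json;enum-encoding=int'}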
return query_params + + class _BaseListBackendAuthenticationConfigs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/backendAuthenticationConfigs", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backend_authentication_config.ListBackendAuthenticationConfigsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseListBackendAuthenticationConfigs._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListClientTlsPolicies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/clientTlsPolicies", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = client_tls_policy.ListClientTlsPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseListClientTlsPolicies._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListGatewaySecurityPolicies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/gatewaySecurityPolicies", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gateway_security_policy.ListGatewaySecurityPoliciesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseListGatewaySecurityPolicies._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListGatewaySecurityPolicyRules: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*/gatewaySecurityPolicies/*}/rules", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseListGatewaySecurityPolicyRules._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListServerTlsPolicies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/serverTlsPolicies", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = server_tls_policy.ListServerTlsPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseListServerTlsPolicies._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListTlsInspectionPolicies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/tlsInspectionPolicies", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, 
request): + pb_request = tls_inspection_policy.ListTlsInspectionPoliciesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseListTlsInspectionPolicies._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListUrlLists: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/urlLists", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = url_list.ListUrlListsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseListUrlLists._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateAuthorizationPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{authorization_policy.name=projects/*/locations/*/authorizationPolicies/*}", + "body": "authorization_policy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_authorization_policy.UpdateAuthorizationPolicyRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseUpdateAuthorizationPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateAuthzPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, 
Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{authz_policy.name=projects/*/locations/*/authzPolicies/*}", + "body": "authz_policy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_authz_policy.UpdateAuthzPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseUpdateAuthzPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateBackendAuthenticationConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{backend_authentication_config.name=projects/*/locations/*/backendAuthenticationConfigs/*}", + "body": "backend_authentication_config", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseUpdateBackendAuthenticationConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateClientTlsPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{client_tls_policy.name=projects/*/locations/*/clientTlsPolicies/*}", + "body": "client_tls_policy", + }, + ] + return 
http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseUpdateClientTlsPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateGatewaySecurityPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{gateway_security_policy.name=projects/*/locations/*/gatewaySecurityPolicies/*}", + "body": "gateway_security_policy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateGatewaySecurityPolicyRule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{gateway_security_policy_rule.name=projects/*/locations/*/gatewaySecurityPolicies/*/rules/*}", + "body": "gateway_security_policy_rule", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = 
json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseUpdateGatewaySecurityPolicyRule._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateServerTlsPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{server_tls_policy.name=projects/*/locations/*/serverTlsPolicies/*}", + "body": "server_tls_policy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_server_tls_policy.UpdateServerTlsPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseUpdateServerTlsPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateTlsInspectionPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{tls_inspection_policy.name=projects/*/locations/*/tlsInspectionPolicies/*}", + "body": "tls_inspection_policy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseUpdateTlsInspectionPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseUpdateUrlList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{url_list.name=projects/*/locations/*/urlLists/*}", + "body": "url_list", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_url_list.UpdateUrlListRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseNetworkSecurityRestTransport._BaseUpdateUrlList._get_unset_required_fields( query_params ) ) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/__init__.py new file mode 100644 index 000000000000..e41850c38f01 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import OrganizationSecurityProfileGroupServiceAsyncClient +from .client import OrganizationSecurityProfileGroupServiceClient + +__all__ = ( + "OrganizationSecurityProfileGroupServiceClient", + "OrganizationSecurityProfileGroupServiceAsyncClient", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/async_client.py new file mode 100644 index 000000000000..0c6e11b295fb --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/async_client.py @@ -0,0 +1,2410 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service import ( + pagers, +) +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group_intercept, + security_profile_group_mirroring, + security_profile_group_service, + security_profile_group_threatprevention, + security_profile_group_urlfiltering, +) +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group as gcn_security_profile_group, +) +from google.cloud.network_security_v1alpha1.types import common +from google.cloud.network_security_v1alpha1.types import security_profile_group + +from .client import OrganizationSecurityProfileGroupServiceClient +from 
.transports.base import ( + DEFAULT_CLIENT_INFO, + OrganizationSecurityProfileGroupServiceTransport, +) +from .transports.grpc_asyncio import ( + OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, +) + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class OrganizationSecurityProfileGroupServiceAsyncClient: + """Organization SecurityProfileGroup is created under + organization. + """ + + _client: OrganizationSecurityProfileGroupServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = OrganizationSecurityProfileGroupServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ( + OrganizationSecurityProfileGroupServiceClient.DEFAULT_MTLS_ENDPOINT + ) + _DEFAULT_ENDPOINT_TEMPLATE = ( + OrganizationSecurityProfileGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = OrganizationSecurityProfileGroupServiceClient._DEFAULT_UNIVERSE + + intercept_endpoint_group_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.intercept_endpoint_group_path + ) + parse_intercept_endpoint_group_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.parse_intercept_endpoint_group_path + ) + mirroring_endpoint_group_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.mirroring_endpoint_group_path + ) + parse_mirroring_endpoint_group_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.parse_mirroring_endpoint_group_path + ) + security_profile_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.security_profile_path + ) + parse_security_profile_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.parse_security_profile_path + ) + security_profile_group_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.security_profile_group_path + ) + parse_security_profile_group_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.parse_security_profile_group_path + ) + common_billing_account_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + OrganizationSecurityProfileGroupServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of 
this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OrganizationSecurityProfileGroupServiceAsyncClient: The constructed client. + """ + return OrganizationSecurityProfileGroupServiceClient.from_service_account_info.__func__(OrganizationSecurityProfileGroupServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OrganizationSecurityProfileGroupServiceAsyncClient: The constructed client. + """ + return OrganizationSecurityProfileGroupServiceClient.from_service_account_file.__func__(OrganizationSecurityProfileGroupServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return OrganizationSecurityProfileGroupServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> OrganizationSecurityProfileGroupServiceTransport: + """Returns the transport used by the client instance. + + Returns: + OrganizationSecurityProfileGroupServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = ( + OrganizationSecurityProfileGroupServiceClient.get_transport_class + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + OrganizationSecurityProfileGroupServiceTransport, + Callable[..., OrganizationSecurityProfileGroupServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the organization security profile group service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,OrganizationSecurityProfileGroupServiceTransport,Callable[..., OrganizationSecurityProfileGroupServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the OrganizationSecurityProfileGroupServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = OrganizationSecurityProfileGroupServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "credentialsType": None, + }, + ) + + async def list_security_profile_groups( + self, + request: Optional[ + Union[security_profile_group_service.ListSecurityProfileGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSecurityProfileGroupsAsyncPager: + r"""Lists SecurityProfileGroups in a given organization + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_security_profile_groups(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSecurityProfileGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_profile_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsRequest, dict]]): + The request object. Request used with the + ListSecurityProfileGroups method. + parent (:class:`str`): + Required. The project or organization and location from + which the SecurityProfileGroups should be listed, + specified in the format + ``projects|organizations/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.pagers.ListSecurityProfileGroupsAsyncPager: + Response returned by the + ListSecurityProfileGroups method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.ListSecurityProfileGroupsRequest + ): + request = security_profile_group_service.ListSecurityProfileGroupsRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_security_profile_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSecurityProfileGroupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_security_profile_group( + self, + request: Optional[ + Union[security_profile_group_service.GetSecurityProfileGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> security_profile_group.SecurityProfileGroup: + r"""Gets details of a single SecurityProfileGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSecurityProfileGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_security_profile_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetSecurityProfileGroupRequest, dict]]): + The request object. Request used by the + GetSecurityProfileGroup method. + name (:class:`str`): + Required. A name of the SecurityProfileGroup to get. + Must be in the format + ``projects|organizations/*/locations/{location}/securityProfileGroups/{security_profile_group}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.SecurityProfileGroup: + SecurityProfileGroup is a resource + that defines the behavior for various + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.GetSecurityProfileGroupRequest + ): + request = security_profile_group_service.GetSecurityProfileGroupRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_security_profile_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
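# Editorial aside (not part of the generated patch): the routing-header metadata added
# earlier in this method is what lets the backend route the call by resource name.
# Roughly, and with a made-up resource name:
#
#     from google.api_core import gapic_v1
#
#     header = gapic_v1.routing_header.to_grpc_metadata(
#         (("name", "organizations/123/locations/global/securityProfileGroups/spg1"),)
#     )
#     # header is a single ("x-goog-request-params", "name=organizations%2F123%2F...")
#     # pair appended to the outgoing metadata.
#
# The exact URL-encoding is handled by google.api_core and may differ in detail.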
+ return response + + async def create_security_profile_group( + self, + request: Optional[ + Union[ + security_profile_group_service.CreateSecurityProfileGroupRequest, dict + ] + ] = None, + *, + parent: Optional[str] = None, + security_profile_group: Optional[ + gcn_security_profile_group.SecurityProfileGroup + ] = None, + security_profile_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new SecurityProfileGroup in a given + organization and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSecurityProfileGroupRequest( + parent="parent_value", + security_profile_group_id="security_profile_group_id_value", + ) + + # Make the request + operation = client.create_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateSecurityProfileGroupRequest, dict]]): + The request object. Request used by the + CreateSecurityProfileGroup method. + parent (:class:`str`): + Required. The parent resource of the + SecurityProfileGroup. Must be in the format + ``projects|organizations/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_profile_group (:class:`google.cloud.network_security_v1alpha1.types.SecurityProfileGroup`): + Required. SecurityProfileGroup + resource to be created. + + This corresponds to the ``security_profile_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_profile_group_id (:class:`str`): + Required. Short name of the SecurityProfileGroup + resource to be created. This value should be 1-63 + characters long, containing only letters, numbers, + hyphens, and underscores, and should not start with a + number. E.g. "security_profile_group1". + + This corresponds to the ``security_profile_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.SecurityProfileGroup` SecurityProfileGroup is a resource that defines the behavior for various + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, security_profile_group, security_profile_group_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.CreateSecurityProfileGroupRequest + ): + request = security_profile_group_service.CreateSecurityProfileGroupRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if security_profile_group is not None: + request.security_profile_group = security_profile_group + if security_profile_group_id is not None: + request.security_profile_group_id = security_profile_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_security_profile_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_security_profile_group.SecurityProfileGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_security_profile_group( + self, + request: Optional[ + Union[ + security_profile_group_service.UpdateSecurityProfileGroupRequest, dict + ] + ] = None, + *, + security_profile_group: Optional[ + gcn_security_profile_group.SecurityProfileGroup + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single + SecurityProfileGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateSecurityProfileGroupRequest( + ) + + # Make the request + operation = client.update_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateSecurityProfileGroupRequest, dict]]): + The request object. Request used by the + UpdateSecurityProfileGroup method. + security_profile_group (:class:`google.cloud.network_security_v1alpha1.types.SecurityProfileGroup`): + Required. Updated + SecurityProfileGroup resource. + + This corresponds to the ``security_profile_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the SecurityProfileGroup resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.SecurityProfileGroup` SecurityProfileGroup is a resource that defines the behavior for various + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [security_profile_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.UpdateSecurityProfileGroupRequest + ): + request = security_profile_group_service.UpdateSecurityProfileGroupRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
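# Editorial aside (not part of the generated patch): the flattened arguments applied just
# below are usually paired with a FieldMask that names only the fields to overwrite, as
# the docstring above describes. A minimal, hypothetical usage sketch (the field path is
# illustrative, not taken from this patch):
#
#     from google.protobuf import field_mask_pb2
#
#     operation = await client.update_security_profile_group(
#         security_profile_group=updated_group,
#         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
#     )
#     result = await operation.result()
#
# Fields left out of the mask keep their existing server-side values.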
+ if security_profile_group is not None: + request.security_profile_group = security_profile_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_security_profile_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("security_profile_group.name", request.security_profile_group.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_security_profile_group.SecurityProfileGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_security_profile_group( + self, + request: Optional[ + Union[ + security_profile_group_service.DeleteSecurityProfileGroupRequest, dict + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single SecurityProfileGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSecurityProfileGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteSecurityProfileGroupRequest, dict]]): + The request object. Request used by the + DeleteSecurityProfileGroup method. + name (:class:`str`): + Required. A name of the SecurityProfileGroup to delete. + Must be in the format + ``projects|organizations/*/locations/{location}/securityProfileGroups/{security_profile_group}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.DeleteSecurityProfileGroupRequest + ): + request = security_profile_group_service.DeleteSecurityProfileGroupRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_security_profile_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_security_profiles( + self, + request: Optional[ + Union[security_profile_group_service.ListSecurityProfilesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSecurityProfilesAsyncPager: + r"""Lists SecurityProfiles in a given organization and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_security_profiles(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSecurityProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListSecurityProfilesRequest, dict]]): + The request object. Request used with the + ListSecurityProfiles method. + parent (:class:`str`): + Required. The project or organization and location from + which the SecurityProfiles should be listed, specified + in the format + ``projects|organizations/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.pagers.ListSecurityProfilesAsyncPager: + Response returned by the + ListSecurityProfiles method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.ListSecurityProfilesRequest + ): + request = security_profile_group_service.ListSecurityProfilesRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_security_profiles + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
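# Editorial aside (not part of the generated patch): the response returned just below is
# wrapped in a ListSecurityProfilesAsyncPager, which fetches additional pages on demand.
# Besides iterating items with ``async for``, the generated async pagers also expose
# per-page iteration; a hypothetical sketch (the parent value and the response field
# name are illustrative):
#
#     pager = await client.list_security_profiles(
#         parent="organizations/123/locations/global"
#     )
#     async for page in pager.pages:
#         print(len(page.security_profiles))
#
# Page size and page tokens can be controlled through ListSecurityProfilesRequest.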
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSecurityProfilesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_security_profile( + self, + request: Optional[ + Union[security_profile_group_service.GetSecurityProfileRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> security_profile_group.SecurityProfile: + r"""Gets details of a single SecurityProfile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSecurityProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_security_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetSecurityProfileRequest, dict]]): + The request object. Request used by the + GetSecurityProfile method. + name (:class:`str`): + Required. A name of the SecurityProfile to get. Must be + in the format + ``projects|organizations/*/locations/{location}/securityProfiles/{security_profile_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.SecurityProfile: + SecurityProfile is a resource that + defines the behavior for one of many + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
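# Editorial aside (not part of the generated patch): because the coercion just below
# accepts any mapping, a plain dict can stand in for the request object. These two calls
# are equivalent in effect (the resource name is a made-up placeholder):
#
#     name = "organizations/123/locations/global/securityProfiles/sp1"
#     profile = await client.get_security_profile(name=name)
#     profile = await client.get_security_profile(request={"name": name})
#
# Only one of ``request`` and the flattened ``name`` argument may be supplied per call.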
+ if not isinstance( + request, security_profile_group_service.GetSecurityProfileRequest + ): + request = security_profile_group_service.GetSecurityProfileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_security_profile + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_security_profile( + self, + request: Optional[ + Union[security_profile_group_service.CreateSecurityProfileRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + security_profile: Optional[security_profile_group.SecurityProfile] = None, + security_profile_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new SecurityProfile in a given organization + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSecurityProfileRequest( + parent="parent_value", + security_profile_id="security_profile_id_value", + ) + + # Make the request + operation = client.create_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateSecurityProfileRequest, dict]]): + The request object. Request used by the + CreateSecurityProfile method. + parent (:class:`str`): + Required. The parent resource of the SecurityProfile. + Must be in the format + ``projects|organizations/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_profile (:class:`google.cloud.network_security_v1alpha1.types.SecurityProfile`): + Required. SecurityProfile resource to + be created. + + This corresponds to the ``security_profile`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_profile_id (:class:`str`): + Required. Short name of the SecurityProfile resource to + be created. 
This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and + underscores, and should not start with a number. E.g. + "security_profile1". + + This corresponds to the ``security_profile_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.SecurityProfile` SecurityProfile is a resource that defines the behavior for one of many + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, security_profile, security_profile_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.CreateSecurityProfileRequest + ): + request = security_profile_group_service.CreateSecurityProfileRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if security_profile is not None: + request.security_profile = security_profile + if security_profile_id is not None: + request.security_profile_id = security_profile_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_security_profile + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + security_profile_group.SecurityProfile, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
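+        # (The returned AsyncOperation resolves to the created
+        # ``SecurityProfile``; long-running progress is reported via
+        # ``common.OperationMetadata``, as configured in the
+        # ``operation_async.from_gapic`` call above.)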
+ return response + + async def update_security_profile( + self, + request: Optional[ + Union[security_profile_group_service.UpdateSecurityProfileRequest, dict] + ] = None, + *, + security_profile: Optional[security_profile_group.SecurityProfile] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single SecurityProfile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateSecurityProfileRequest( + ) + + # Make the request + operation = client.update_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdateSecurityProfileRequest, dict]]): + The request object. Request used by the + UpdateSecurityProfile method. + security_profile (:class:`google.cloud.network_security_v1alpha1.types.SecurityProfile`): + Required. Updated SecurityProfile + resource. + + This corresponds to the ``security_profile`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the SecurityProfile resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.SecurityProfile` SecurityProfile is a resource that defines the behavior for one of many + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
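+        # (Callers pass either a fully-formed request/dict *or* the flattened
+        # ``security_profile``/``update_mask`` arguments below, never both;
+        # mixing the two raises ``ValueError``.)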
+ flattened_params = [security_profile, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.UpdateSecurityProfileRequest + ): + request = security_profile_group_service.UpdateSecurityProfileRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if security_profile is not None: + request.security_profile = security_profile + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_security_profile + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("security_profile.name", request.security_profile.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + security_profile_group.SecurityProfile, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_security_profile( + self, + request: Optional[ + Union[security_profile_group_service.DeleteSecurityProfileRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single SecurityProfile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSecurityProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteSecurityProfileRequest, dict]]): + The request object. Request used by the + DeleteSecurityProfile method. + name (:class:`str`): + Required. A name of the SecurityProfile to delete. 
Must + be in the format + ``projects|organizations/*/locations/{location}/securityProfiles/{security_profile_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.DeleteSecurityProfileRequest + ): + request = security_profile_group_service.DeleteSecurityProfileRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_security_profile + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
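+        # (Cancellation is best-effort on the server side; this call returns
+        # ``None`` and does not wait for the operation to terminate.)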
+ rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. 
+ """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "OrganizationSecurityProfileGroupServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("OrganizationSecurityProfileGroupServiceAsyncClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/client.py new file mode 100644 index 000000000000..c0210c504e3b --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/client.py @@ -0,0 +1,2901 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf 
import timestamp_pb2  # type: ignore
+
+from google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service import (
+    pagers,
+)
+from google.cloud.network_security_v1alpha1.types import (
+    security_profile_group_intercept,
+    security_profile_group_mirroring,
+    security_profile_group_service,
+    security_profile_group_threatprevention,
+    security_profile_group_urlfiltering,
+)
+from google.cloud.network_security_v1alpha1.types import (
+    security_profile_group as gcn_security_profile_group,
+)
+from google.cloud.network_security_v1alpha1.types import common
+from google.cloud.network_security_v1alpha1.types import security_profile_group
+
+from .transports.base import (
+    DEFAULT_CLIENT_INFO,
+    OrganizationSecurityProfileGroupServiceTransport,
+)
+from .transports.grpc import OrganizationSecurityProfileGroupServiceGrpcTransport
+from .transports.grpc_asyncio import (
+    OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport,
+)
+from .transports.rest import OrganizationSecurityProfileGroupServiceRestTransport
+
+
+class OrganizationSecurityProfileGroupServiceClientMeta(type):
+    """Metaclass for the OrganizationSecurityProfileGroupService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[OrganizationSecurityProfileGroupServiceTransport]]
+    _transport_registry["grpc"] = OrganizationSecurityProfileGroupServiceGrpcTransport
+    _transport_registry[
+        "grpc_asyncio"
+    ] = OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = OrganizationSecurityProfileGroupServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[OrganizationSecurityProfileGroupServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class OrganizationSecurityProfileGroupServiceClient(
+    metaclass=OrganizationSecurityProfileGroupServiceClientMeta
+):
+    """Organization SecurityProfileGroup is created under
+    organization.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "networksecurity.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "networksecurity.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OrganizationSecurityProfileGroupServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OrganizationSecurityProfileGroupServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> OrganizationSecurityProfileGroupServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            OrganizationSecurityProfileGroupServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def intercept_endpoint_group_path(
+        project: str,
+        location: str,
+        intercept_endpoint_group: str,
+    ) -> str:
+        """Returns a fully-qualified intercept_endpoint_group string."""
+        return "projects/{project}/locations/{location}/interceptEndpointGroups/{intercept_endpoint_group}".format(
+            project=project,
+            location=location,
+            intercept_endpoint_group=intercept_endpoint_group,
+        )
+
+    @staticmethod
+    def parse_intercept_endpoint_group_path(path: str) -> Dict[str, str]:
+        """Parses a intercept_endpoint_group path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/interceptEndpointGroups/(?P<intercept_endpoint_group>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def mirroring_endpoint_group_path(
+        project: str,
+        location: str,
+        mirroring_endpoint_group: str,
+    ) -> str:
+        """Returns a fully-qualified mirroring_endpoint_group string."""
+        return "projects/{project}/locations/{location}/mirroringEndpointGroups/{mirroring_endpoint_group}".format(
+            project=project,
+            location=location,
+            mirroring_endpoint_group=mirroring_endpoint_group,
+        )
+
+    @staticmethod
+    def parse_mirroring_endpoint_group_path(path: str) -> Dict[str, str]:
+        """Parses a mirroring_endpoint_group path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/mirroringEndpointGroups/(?P<mirroring_endpoint_group>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def security_profile_path(
+        organization: str,
+        location: str,
+        security_profile: str,
+    ) -> str:
+        """Returns a fully-qualified security_profile string."""
+        return "organizations/{organization}/locations/{location}/securityProfiles/{security_profile}".format(
+            organization=organization,
+            location=location,
+            security_profile=security_profile,
+        )
+
+    @staticmethod
+    def parse_security_profile_path(path: str) -> Dict[str, str]:
+        """Parses a security_profile path into its component segments."""
+        m = re.match(
+            r"^organizations/(?P<organization>.+?)/locations/(?P<location>.+?)/securityProfiles/(?P<security_profile>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def security_profile_group_path(
+        organization: str,
+        location: str,
+        security_profile_group: str,
+    ) -> str:
+        """Returns a fully-qualified security_profile_group string."""
+        return "organizations/{organization}/locations/{location}/securityProfileGroups/{security_profile_group}".format(
+            organization=organization,
+            location=location,
+            security_profile_group=security_profile_group,
+        )
+
+    @staticmethod
+    def parse_security_profile_group_path(path: str) -> Dict[str, str]:
+        """Parses a security_profile_group path into its component segments."""
+        m = re.match(
+            r"^organizations/(?P<organization>.+?)/locations/(?P<location>.+?)/securityProfileGroups/(?P<security_profile_group>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ( + OrganizationSecurityProfileGroupServiceClient._DEFAULT_UNIVERSE + ) + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ( + OrganizationSecurityProfileGroupServiceClient.DEFAULT_MTLS_ENDPOINT + ) + else: + api_endpoint = OrganizationSecurityProfileGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ( + OrganizationSecurityProfileGroupServiceClient._DEFAULT_UNIVERSE + ) + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + OrganizationSecurityProfileGroupServiceTransport, + Callable[..., OrganizationSecurityProfileGroupServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the organization security profile group service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,OrganizationSecurityProfileGroupServiceTransport,Callable[..., OrganizationSecurityProfileGroupServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the OrganizationSecurityProfileGroupServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = OrganizationSecurityProfileGroupServiceClient._read_environment_variables() + self._client_cert_source = ( + OrganizationSecurityProfileGroupServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ( + OrganizationSecurityProfileGroupServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance( + transport, OrganizationSecurityProfileGroupServiceTransport + ) + if transport_provided: + # transport is a OrganizationSecurityProfileGroupServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast( + OrganizationSecurityProfileGroupServiceTransport, transport + ) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or OrganizationSecurityProfileGroupServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[OrganizationSecurityProfileGroupServiceTransport], + Callable[..., OrganizationSecurityProfileGroupServiceTransport], + ] = ( + OrganizationSecurityProfileGroupServiceClient.get_transport_class( + transport + ) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., OrganizationSecurityProfileGroupServiceTransport], + transport, + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "credentialsType": None, + }, + ) + + def list_security_profile_groups( + self, + request: Optional[ + Union[security_profile_group_service.ListSecurityProfileGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSecurityProfileGroupsPager: + r"""Lists SecurityProfileGroups in a given organization + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_security_profile_groups(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSecurityProfileGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_profile_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsRequest, dict]): + The request object. Request used with the + ListSecurityProfileGroups method. + parent (str): + Required. The project or organization and location from + which the SecurityProfileGroups should be listed, + specified in the format + ``projects|organizations/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.pagers.ListSecurityProfileGroupsPager: + Response returned by the + ListSecurityProfileGroups method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.ListSecurityProfileGroupsRequest + ): + request = security_profile_group_service.ListSecurityProfileGroupsRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_security_profile_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
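+ # Illustrative usage only (resource names are placeholders, not part of this
+ # module): the pager returned further down resolves additional pages lazily,
+ # so callers can also walk page by page, e.g.:
+ #   pager = client.list_security_profile_groups(parent="organizations/ORG/locations/global")
+ #   for page in pager.pages:
+ #       print(page.next_page_token)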
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSecurityProfileGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_security_profile_group( + self, + request: Optional[ + Union[security_profile_group_service.GetSecurityProfileGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> security_profile_group.SecurityProfileGroup: + r"""Gets details of a single SecurityProfileGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSecurityProfileGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_security_profile_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetSecurityProfileGroupRequest, dict]): + The request object. Request used by the + GetSecurityProfileGroup method. + name (str): + Required. A name of the SecurityProfileGroup to get. + Must be in the format + ``projects|organizations/*/locations/{location}/securityProfileGroups/{security_profile_group}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.SecurityProfileGroup: + SecurityProfileGroup is a resource + that defines the behavior for various + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
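+ # A plain dict is also accepted here and is coerced into the request type
+ # below, so (illustratively) passing request={"name": "..."} behaves the same
+ # as constructing GetSecurityProfileGroupRequest(name="...") up front.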
+ if not isinstance( + request, security_profile_group_service.GetSecurityProfileGroupRequest + ): + request = security_profile_group_service.GetSecurityProfileGroupRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_security_profile_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_security_profile_group( + self, + request: Optional[ + Union[ + security_profile_group_service.CreateSecurityProfileGroupRequest, dict + ] + ] = None, + *, + parent: Optional[str] = None, + security_profile_group: Optional[ + gcn_security_profile_group.SecurityProfileGroup + ] = None, + security_profile_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new SecurityProfileGroup in a given + organization and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSecurityProfileGroupRequest( + parent="parent_value", + security_profile_group_id="security_profile_group_id_value", + ) + + # Make the request + operation = client.create_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateSecurityProfileGroupRequest, dict]): + The request object. Request used by the + CreateSecurityProfileGroup method. + parent (str): + Required. The parent resource of the + SecurityProfileGroup. Must be in the format + ``projects|organizations/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_profile_group (google.cloud.network_security_v1alpha1.types.SecurityProfileGroup): + Required. SecurityProfileGroup + resource to be created. + + This corresponds to the ``security_profile_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_profile_group_id (str): + Required. Short name of the SecurityProfileGroup + resource to be created. 
This value should be 1-63 + characters long, containing only letters, numbers, + hyphens, and underscores, and should not start with a + number. E.g. "security_profile_group1". + + This corresponds to the ``security_profile_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.SecurityProfileGroup` SecurityProfileGroup is a resource that defines the behavior for various + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, security_profile_group, security_profile_group_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.CreateSecurityProfileGroupRequest + ): + request = security_profile_group_service.CreateSecurityProfileGroupRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if security_profile_group is not None: + request.security_profile_group = security_profile_group + if security_profile_group_id is not None: + request.security_profile_group_id = security_profile_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_security_profile_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_security_profile_group.SecurityProfileGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
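+ # The returned future also exposes the LRO metadata; for example
+ # (illustrative), response.metadata is an OperationMetadata message and
+ # response.result() blocks until the SecurityProfileGroup has been created.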
+ return response + + def update_security_profile_group( + self, + request: Optional[ + Union[ + security_profile_group_service.UpdateSecurityProfileGroupRequest, dict + ] + ] = None, + *, + security_profile_group: Optional[ + gcn_security_profile_group.SecurityProfileGroup + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single + SecurityProfileGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateSecurityProfileGroupRequest( + ) + + # Make the request + operation = client.update_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateSecurityProfileGroupRequest, dict]): + The request object. Request used by the + UpdateSecurityProfileGroup method. + security_profile_group (google.cloud.network_security_v1alpha1.types.SecurityProfileGroup): + Required. Updated + SecurityProfileGroup resource. + + This corresponds to the ``security_profile_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the SecurityProfileGroup resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.SecurityProfileGroup` SecurityProfileGroup is a resource that defines the behavior for various + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
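+ # For example (illustrative), calling this method with both request=... and
+ # update_mask=... set raises the ValueError below; callers pass either a
+ # full request object or the flattened fields, never both.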
+ flattened_params = [security_profile_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.UpdateSecurityProfileGroupRequest + ): + request = security_profile_group_service.UpdateSecurityProfileGroupRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if security_profile_group is not None: + request.security_profile_group = security_profile_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_security_profile_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("security_profile_group.name", request.security_profile_group.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_security_profile_group.SecurityProfileGroup, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_security_profile_group( + self, + request: Optional[ + Union[ + security_profile_group_service.DeleteSecurityProfileGroupRequest, dict + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single SecurityProfileGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSecurityProfileGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteSecurityProfileGroupRequest, dict]): + The request object. Request used by the + DeleteSecurityProfileGroup method. + name (str): + Required. A name of the SecurityProfileGroup to delete. 
+ Must be in the format + ``projects|organizations/*/locations/{location}/securityProfileGroups/{security_profile_group}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.DeleteSecurityProfileGroupRequest + ): + request = security_profile_group_service.DeleteSecurityProfileGroupRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_security_profile_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_security_profiles( + self, + request: Optional[ + Union[security_profile_group_service.ListSecurityProfilesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSecurityProfilesPager: + r"""Lists SecurityProfiles in a given organization and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_security_profiles(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSecurityProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListSecurityProfilesRequest, dict]): + The request object. Request used with the + ListSecurityProfiles method. + parent (str): + Required. The project or organization and location from + which the SecurityProfiles should be listed, specified + in the format + ``projects|organizations/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.pagers.ListSecurityProfilesPager: + Response returned by the + ListSecurityProfiles method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.ListSecurityProfilesRequest + ): + request = security_profile_group_service.ListSecurityProfilesRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_security_profiles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
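+ # The routing header added above is sent as x-goog-request-params metadata,
+ # e.g. (illustrative) "parent=organizations/ORG/locations/global", which lets
+ # the service route the request without inspecting the request body.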
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSecurityProfilesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_security_profile( + self, + request: Optional[ + Union[security_profile_group_service.GetSecurityProfileRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> security_profile_group.SecurityProfile: + r"""Gets details of a single SecurityProfile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSecurityProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_security_profile(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetSecurityProfileRequest, dict]): + The request object. Request used by the + GetSecurityProfile method. + name (str): + Required. A name of the SecurityProfile to get. Must be + in the format + ``projects|organizations/*/locations/{location}/securityProfiles/{security_profile_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.SecurityProfile: + SecurityProfile is a resource that + defines the behavior for one of many + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, security_profile_group_service.GetSecurityProfileRequest + ): + request = security_profile_group_service.GetSecurityProfileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_security_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_security_profile( + self, + request: Optional[ + Union[security_profile_group_service.CreateSecurityProfileRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + security_profile: Optional[security_profile_group.SecurityProfile] = None, + security_profile_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new SecurityProfile in a given organization + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSecurityProfileRequest( + parent="parent_value", + security_profile_id="security_profile_id_value", + ) + + # Make the request + operation = client.create_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateSecurityProfileRequest, dict]): + The request object. Request used by the + CreateSecurityProfile method. + parent (str): + Required. The parent resource of the SecurityProfile. + Must be in the format + ``projects|organizations/*/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_profile (google.cloud.network_security_v1alpha1.types.SecurityProfile): + Required. SecurityProfile resource to + be created. + + This corresponds to the ``security_profile`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_profile_id (str): + Required. Short name of the SecurityProfile resource to + be created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and + underscores, and should not start with a number. E.g. + "security_profile1". 
+ + This corresponds to the ``security_profile_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.SecurityProfile` SecurityProfile is a resource that defines the behavior for one of many + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, security_profile, security_profile_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.CreateSecurityProfileRequest + ): + request = security_profile_group_service.CreateSecurityProfileRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if security_profile is not None: + request.security_profile = security_profile + if security_profile_id is not None: + request.security_profile_id = security_profile_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_security_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + security_profile_group.SecurityProfile, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_security_profile( + self, + request: Optional[ + Union[security_profile_group_service.UpdateSecurityProfileRequest, dict] + ] = None, + *, + security_profile: Optional[security_profile_group.SecurityProfile] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single SecurityProfile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateSecurityProfileRequest( + ) + + # Make the request + operation = client.update_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdateSecurityProfileRequest, dict]): + The request object. Request used by the + UpdateSecurityProfile method. + security_profile (google.cloud.network_security_v1alpha1.types.SecurityProfile): + Required. Updated SecurityProfile + resource. + + This corresponds to the ``security_profile`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the SecurityProfile resource by the + update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_security_v1alpha1.types.SecurityProfile` SecurityProfile is a resource that defines the behavior for one of many + ProfileTypes. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [security_profile, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.UpdateSecurityProfileRequest + ): + request = security_profile_group_service.UpdateSecurityProfileRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
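+ # An illustrative update_mask (placeholder field name): passing
+ # field_mask_pb2.FieldMask(paths=["description"]) limits the update to that
+ # field and leaves the rest of the SecurityProfile unchanged.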
+ if security_profile is not None: + request.security_profile = security_profile + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_security_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("security_profile.name", request.security_profile.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + security_profile_group.SecurityProfile, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_security_profile( + self, + request: Optional[ + Union[security_profile_group_service.DeleteSecurityProfileRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single SecurityProfile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSecurityProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteSecurityProfileRequest, dict]): + The request object. Request used by the + DeleteSecurityProfile method. + name (str): + Required. A name of the SecurityProfile to delete. Must + be in the format + ``projects|organizations/*/locations/{location}/securityProfiles/{security_profile_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, security_profile_group_service.DeleteSecurityProfileRequest + ): + request = security_profile_group_service.DeleteSecurityProfileRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_security_profile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "OrganizationSecurityProfileGroupServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
+ + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("OrganizationSecurityProfileGroupServiceClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/pagers.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/pagers.py new file mode 100644 index 000000000000..5a6d9a3af7b6 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/pagers.py @@ -0,0 +1,383 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group, + security_profile_group_service, +) + + +class ListSecurityProfileGroupsPager: + """A pager for iterating through ``list_security_profile_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``security_profile_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSecurityProfileGroups`` requests and continue to iterate + through the ``security_profile_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., security_profile_group_service.ListSecurityProfileGroupsResponse + ], + request: security_profile_group_service.ListSecurityProfileGroupsRequest, + response: security_profile_group_service.ListSecurityProfileGroupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = security_profile_group_service.ListSecurityProfileGroupsRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[security_profile_group_service.ListSecurityProfileGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[security_profile_group.SecurityProfileGroup]: + for page in self.pages: + yield from page.security_profile_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSecurityProfileGroupsAsyncPager: + """A pager for iterating through ``list_security_profile_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``security_profile_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSecurityProfileGroups`` requests and continue to iterate + through the ``security_profile_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[security_profile_group_service.ListSecurityProfileGroupsResponse], + ], + request: security_profile_group_service.ListSecurityProfileGroupsRequest, + response: security_profile_group_service.ListSecurityProfileGroupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = security_profile_group_service.ListSecurityProfileGroupsRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + security_profile_group_service.ListSecurityProfileGroupsResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[security_profile_group.SecurityProfileGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.security_profile_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSecurityProfilesPager: + """A pager for iterating through ``list_security_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListSecurityProfilesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``security_profiles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSecurityProfiles`` requests and continue to iterate + through the ``security_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListSecurityProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., security_profile_group_service.ListSecurityProfilesResponse + ], + request: security_profile_group_service.ListSecurityProfilesRequest, + response: security_profile_group_service.ListSecurityProfilesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListSecurityProfilesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListSecurityProfilesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = security_profile_group_service.ListSecurityProfilesRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[security_profile_group_service.ListSecurityProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[security_profile_group.SecurityProfile]: + for page in self.pages: + yield from page.security_profiles + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSecurityProfilesAsyncPager: + """A pager for iterating through ``list_security_profiles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListSecurityProfilesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``security_profiles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSecurityProfiles`` requests and continue to iterate + through the ``security_profiles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListSecurityProfilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[security_profile_group_service.ListSecurityProfilesResponse] + ], + request: security_profile_group_service.ListSecurityProfilesRequest, + response: security_profile_group_service.ListSecurityProfilesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListSecurityProfilesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListSecurityProfilesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = security_profile_group_service.ListSecurityProfilesRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[security_profile_group_service.ListSecurityProfilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[security_profile_group.SecurityProfile]: + async def async_generator(): + async for page in self.pages: + for response in page.security_profiles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/README.rst b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/README.rst new file mode 100644 index 000000000000..e4253eda3c51 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`OrganizationSecurityProfileGroupServiceTransport` is the ABC for all transports. +- public child `OrganizationSecurityProfileGroupServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseOrganizationSecurityProfileGroupServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `OrganizationSecurityProfileGroupServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
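A minimal usage sketch (illustration only, not part of the generated sources): it shows how the client, pager, and transport pieces added above fit together. It assumes application-default credentials, a placeholder parent path, and that ListSecurityProfileGroupsRequest exposes the usual `parent` field; only module and class names introduced in this patch are used.

# Illustrative sketch only -- not part of the generated library.
# Assumes application-default credentials and a placeholder parent path.
from google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service import (
    OrganizationSecurityProfileGroupServiceClient,
)
from google.cloud.network_security_v1alpha1.types import security_profile_group_service

# Using the client as a context manager closes the underlying transport on exit;
# see the __exit__ warning above about transports shared between clients.
with OrganizationSecurityProfileGroupServiceClient() as client:
    request = security_profile_group_service.ListSecurityProfileGroupsRequest(
        parent="organizations/ORG_ID/locations/global",  # placeholder, field name assumed
    )
    # The returned pager issues follow-up ListSecurityProfileGroups requests as it
    # is iterated, walking next_page_token behind the scenes.
    for group in client.list_security_profile_groups(request=request):
        print(group.name)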
diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/__init__.py new file mode 100644 index 000000000000..cdb17f07f8b4 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/__init__.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import OrganizationSecurityProfileGroupServiceTransport +from .grpc import OrganizationSecurityProfileGroupServiceGrpcTransport +from .grpc_asyncio import OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport +from .rest import ( + OrganizationSecurityProfileGroupServiceRestInterceptor, + OrganizationSecurityProfileGroupServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[OrganizationSecurityProfileGroupServiceTransport]] +_transport_registry["grpc"] = OrganizationSecurityProfileGroupServiceGrpcTransport +_transport_registry[ + "grpc_asyncio" +] = OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport +_transport_registry["rest"] = OrganizationSecurityProfileGroupServiceRestTransport + +__all__ = ( + "OrganizationSecurityProfileGroupServiceTransport", + "OrganizationSecurityProfileGroupServiceGrpcTransport", + "OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport", + "OrganizationSecurityProfileGroupServiceRestTransport", + "OrganizationSecurityProfileGroupServiceRestInterceptor", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/base.py new file mode 100644 index 000000000000..4d4b3948644e --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/base.py @@ -0,0 +1,445 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group, + security_profile_group_service, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class OrganizationSecurityProfileGroupServiceTransport(abc.ABC): + """Abstract transport class for OrganizationSecurityProfileGroupService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "networksecurity.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_security_profile_groups: gapic_v1.method.wrap_method( + self.list_security_profile_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_security_profile_group: gapic_v1.method.wrap_method( + self.get_security_profile_group, + default_timeout=None, + client_info=client_info, + ), + self.create_security_profile_group: gapic_v1.method.wrap_method( + self.create_security_profile_group, + default_timeout=None, + client_info=client_info, + ), + self.update_security_profile_group: gapic_v1.method.wrap_method( + self.update_security_profile_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_security_profile_group: gapic_v1.method.wrap_method( + self.delete_security_profile_group, + default_timeout=None, + client_info=client_info, + ), + self.list_security_profiles: gapic_v1.method.wrap_method( + self.list_security_profiles, + default_timeout=None, + client_info=client_info, + ), + self.get_security_profile: gapic_v1.method.wrap_method( + self.get_security_profile, + default_timeout=None, + client_info=client_info, + ), + self.create_security_profile: gapic_v1.method.wrap_method( + self.create_security_profile, + default_timeout=None, + client_info=client_info, + ), + self.update_security_profile: gapic_v1.method.wrap_method( + self.update_security_profile, + default_timeout=None, + client_info=client_info, + ), + self.delete_security_profile: gapic_v1.method.wrap_method( + self.delete_security_profile, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + 
client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_security_profile_groups( + self, + ) -> Callable[ + [security_profile_group_service.ListSecurityProfileGroupsRequest], + Union[ + security_profile_group_service.ListSecurityProfileGroupsResponse, + Awaitable[security_profile_group_service.ListSecurityProfileGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.GetSecurityProfileGroupRequest], + Union[ + security_profile_group.SecurityProfileGroup, + Awaitable[security_profile_group.SecurityProfileGroup], + ], + ]: + raise NotImplementedError() + + @property + def create_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.CreateSecurityProfileGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.UpdateSecurityProfileGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.DeleteSecurityProfileGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_security_profiles( + self, + ) -> Callable[ + [security_profile_group_service.ListSecurityProfilesRequest], + Union[ + security_profile_group_service.ListSecurityProfilesResponse, + Awaitable[security_profile_group_service.ListSecurityProfilesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.GetSecurityProfileRequest], + Union[ + security_profile_group.SecurityProfile, + Awaitable[security_profile_group.SecurityProfile], + ], + ]: + raise NotImplementedError() + + @property + def create_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.CreateSecurityProfileRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.UpdateSecurityProfileRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_security_profile( + self, + ) -> Callable[ + 
[security_profile_group_service.DeleteSecurityProfileRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("OrganizationSecurityProfileGroupServiceTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/grpc.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/grpc.py new file mode 100644 index 000000000000..d702ba9c84e8 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/grpc.py @@ -0,0 +1,850 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import json
+import logging as std_logging
+import pickle
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers, operations_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.network_security_v1alpha1.types import (
+    security_profile_group,
+    security_profile_group_service,
+)
+
+from .base import DEFAULT_CLIENT_INFO, OrganizationSecurityProfileGroupServiceTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata into a dict of strings for the log record.
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class OrganizationSecurityProfileGroupServiceGrpcTransport(
+    OrganizationSecurityProfileGroupServiceTransport
+): + """gRPC backend transport for OrganizationSecurityProfileGroupService. + + Organization SecurityProfileGroup is created under + organization. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_security_profile_groups( + self, + ) -> Callable[ + [security_profile_group_service.ListSecurityProfileGroupsRequest], + security_profile_group_service.ListSecurityProfileGroupsResponse, + ]: + r"""Return a callable for the list security profile groups method over gRPC. + + Lists SecurityProfileGroups in a given organization + and location. + + Returns: + Callable[[~.ListSecurityProfileGroupsRequest], + ~.ListSecurityProfileGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_security_profile_groups" not in self._stubs: + self._stubs[ + "list_security_profile_groups" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/ListSecurityProfileGroups", + request_serializer=security_profile_group_service.ListSecurityProfileGroupsRequest.serialize, + response_deserializer=security_profile_group_service.ListSecurityProfileGroupsResponse.deserialize, + ) + return self._stubs["list_security_profile_groups"] + + @property + def get_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.GetSecurityProfileGroupRequest], + security_profile_group.SecurityProfileGroup, + ]: + r"""Return a callable for the get security profile group method over gRPC. 
+ + Gets details of a single SecurityProfileGroup. + + Returns: + Callable[[~.GetSecurityProfileGroupRequest], + ~.SecurityProfileGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_security_profile_group" not in self._stubs: + self._stubs[ + "get_security_profile_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/GetSecurityProfileGroup", + request_serializer=security_profile_group_service.GetSecurityProfileGroupRequest.serialize, + response_deserializer=security_profile_group.SecurityProfileGroup.deserialize, + ) + return self._stubs["get_security_profile_group"] + + @property + def create_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.CreateSecurityProfileGroupRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create security profile group method over gRPC. + + Creates a new SecurityProfileGroup in a given + organization and location. + + Returns: + Callable[[~.CreateSecurityProfileGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_security_profile_group" not in self._stubs: + self._stubs[ + "create_security_profile_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/CreateSecurityProfileGroup", + request_serializer=security_profile_group_service.CreateSecurityProfileGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_security_profile_group"] + + @property + def update_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.UpdateSecurityProfileGroupRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update security profile group method over gRPC. + + Updates the parameters of a single + SecurityProfileGroup. + + Returns: + Callable[[~.UpdateSecurityProfileGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_security_profile_group" not in self._stubs: + self._stubs[ + "update_security_profile_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/UpdateSecurityProfileGroup", + request_serializer=security_profile_group_service.UpdateSecurityProfileGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_security_profile_group"] + + @property + def delete_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.DeleteSecurityProfileGroupRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete security profile group method over gRPC. + + Deletes a single SecurityProfileGroup. 
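+
+        A minimal sketch of invoking the returned callable directly; the
+        resource name is illustrative and most callers go through the
+        generated client instead::
+
+            operation = transport.delete_security_profile_group(
+                security_profile_group_service.DeleteSecurityProfileGroupRequest(
+                    name="organizations/ORG_ID/locations/global/securityProfileGroups/my-group",
+                )
+            )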
+ + Returns: + Callable[[~.DeleteSecurityProfileGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_security_profile_group" not in self._stubs: + self._stubs[ + "delete_security_profile_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/DeleteSecurityProfileGroup", + request_serializer=security_profile_group_service.DeleteSecurityProfileGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_security_profile_group"] + + @property + def list_security_profiles( + self, + ) -> Callable[ + [security_profile_group_service.ListSecurityProfilesRequest], + security_profile_group_service.ListSecurityProfilesResponse, + ]: + r"""Return a callable for the list security profiles method over gRPC. + + Lists SecurityProfiles in a given organization and + location. + + Returns: + Callable[[~.ListSecurityProfilesRequest], + ~.ListSecurityProfilesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_security_profiles" not in self._stubs: + self._stubs["list_security_profiles"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/ListSecurityProfiles", + request_serializer=security_profile_group_service.ListSecurityProfilesRequest.serialize, + response_deserializer=security_profile_group_service.ListSecurityProfilesResponse.deserialize, + ) + return self._stubs["list_security_profiles"] + + @property + def get_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.GetSecurityProfileRequest], + security_profile_group.SecurityProfile, + ]: + r"""Return a callable for the get security profile method over gRPC. + + Gets details of a single SecurityProfile. + + Returns: + Callable[[~.GetSecurityProfileRequest], + ~.SecurityProfile]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_security_profile" not in self._stubs: + self._stubs["get_security_profile"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/GetSecurityProfile", + request_serializer=security_profile_group_service.GetSecurityProfileRequest.serialize, + response_deserializer=security_profile_group.SecurityProfile.deserialize, + ) + return self._stubs["get_security_profile"] + + @property + def create_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.CreateSecurityProfileRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create security profile method over gRPC. + + Creates a new SecurityProfile in a given organization + and location. 
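+
+        The RPC is long-running: this callable returns a raw
+        ``google.longrunning.Operation`` message, while the generated client
+        wraps it so the result can be polled. A minimal sketch with an
+        illustrative parent (other request fields omitted)::
+
+            operation = client.create_security_profile(
+                request=security_profile_group_service.CreateSecurityProfileRequest(
+                    parent="organizations/ORG_ID/locations/global",
+                )
+            )
+            profile = operation.result()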
+ + Returns: + Callable[[~.CreateSecurityProfileRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_security_profile" not in self._stubs: + self._stubs["create_security_profile"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/CreateSecurityProfile", + request_serializer=security_profile_group_service.CreateSecurityProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_security_profile"] + + @property + def update_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.UpdateSecurityProfileRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update security profile method over gRPC. + + Updates the parameters of a single SecurityProfile. + + Returns: + Callable[[~.UpdateSecurityProfileRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_security_profile" not in self._stubs: + self._stubs["update_security_profile"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/UpdateSecurityProfile", + request_serializer=security_profile_group_service.UpdateSecurityProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_security_profile"] + + @property + def delete_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.DeleteSecurityProfileRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete security profile method over gRPC. + + Deletes a single SecurityProfile. + + Returns: + Callable[[~.DeleteSecurityProfileRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_security_profile" not in self._stubs: + self._stubs["delete_security_profile"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/DeleteSecurityProfile", + request_serializer=security_profile_group_service.DeleteSecurityProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_security_profile"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
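+        # A minimal usage sketch: the callable built below accepts an
+        # ``operations_pb2.DeleteOperationRequest`` and returns ``None``;
+        # ``transport`` and the operation name are placeholders.
+        #
+        #     transport.delete_operation(
+        #         operations_pb2.DeleteOperationRequest(name="operations/1234")
+        #     )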
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
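+        # A minimal usage sketch; the resource name and permission string are
+        # placeholders and ``transport`` stands for an instance of this class.
+        #
+        #     response = transport.test_iam_permissions(
+        #         iam_policy_pb2.TestIamPermissionsRequest(
+        #             resource="organizations/ORG_ID/locations/global/securityProfileGroups/my-group",
+        #             permissions=["networksecurity.securityProfileGroups.get"],
+        #         )
+        #     )
+        #     granted = list(response.permissions)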
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("OrganizationSecurityProfileGroupServiceGrpcTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/grpc_asyncio.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..816a689973db --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/grpc_asyncio.py @@ -0,0 +1,963 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group, + security_profile_group_service, +) + +from .base import DEFAULT_CLIENT_INFO, OrganizationSecurityProfileGroupServiceTransport +from .grpc import OrganizationSecurityProfileGroupServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if 
isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport( + OrganizationSecurityProfileGroupServiceTransport +): + """gRPC AsyncIO backend transport for OrganizationSecurityProfileGroupService. + + Organization SecurityProfileGroup is created under + organization. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. 
These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. 
It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. 
+ + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_security_profile_groups( + self, + ) -> Callable[ + [security_profile_group_service.ListSecurityProfileGroupsRequest], + Awaitable[security_profile_group_service.ListSecurityProfileGroupsResponse], + ]: + r"""Return a callable for the list security profile groups method over gRPC. + + Lists SecurityProfileGroups in a given organization + and location. + + Returns: + Callable[[~.ListSecurityProfileGroupsRequest], + Awaitable[~.ListSecurityProfileGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_security_profile_groups" not in self._stubs: + self._stubs[ + "list_security_profile_groups" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/ListSecurityProfileGroups", + request_serializer=security_profile_group_service.ListSecurityProfileGroupsRequest.serialize, + response_deserializer=security_profile_group_service.ListSecurityProfileGroupsResponse.deserialize, + ) + return self._stubs["list_security_profile_groups"] + + @property + def get_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.GetSecurityProfileGroupRequest], + Awaitable[security_profile_group.SecurityProfileGroup], + ]: + r"""Return a callable for the get security profile group method over gRPC. + + Gets details of a single SecurityProfileGroup. + + Returns: + Callable[[~.GetSecurityProfileGroupRequest], + Awaitable[~.SecurityProfileGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_security_profile_group" not in self._stubs: + self._stubs[ + "get_security_profile_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/GetSecurityProfileGroup", + request_serializer=security_profile_group_service.GetSecurityProfileGroupRequest.serialize, + response_deserializer=security_profile_group.SecurityProfileGroup.deserialize, + ) + return self._stubs["get_security_profile_group"] + + @property + def create_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.CreateSecurityProfileGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create security profile group method over gRPC. + + Creates a new SecurityProfileGroup in a given + organization and location. 
+ + Returns: + Callable[[~.CreateSecurityProfileGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_security_profile_group" not in self._stubs: + self._stubs[ + "create_security_profile_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/CreateSecurityProfileGroup", + request_serializer=security_profile_group_service.CreateSecurityProfileGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_security_profile_group"] + + @property + def update_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.UpdateSecurityProfileGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update security profile group method over gRPC. + + Updates the parameters of a single + SecurityProfileGroup. + + Returns: + Callable[[~.UpdateSecurityProfileGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_security_profile_group" not in self._stubs: + self._stubs[ + "update_security_profile_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/UpdateSecurityProfileGroup", + request_serializer=security_profile_group_service.UpdateSecurityProfileGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_security_profile_group"] + + @property + def delete_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.DeleteSecurityProfileGroupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete security profile group method over gRPC. + + Deletes a single SecurityProfileGroup. + + Returns: + Callable[[~.DeleteSecurityProfileGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_security_profile_group" not in self._stubs: + self._stubs[ + "delete_security_profile_group" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/DeleteSecurityProfileGroup", + request_serializer=security_profile_group_service.DeleteSecurityProfileGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_security_profile_group"] + + @property + def list_security_profiles( + self, + ) -> Callable[ + [security_profile_group_service.ListSecurityProfilesRequest], + Awaitable[security_profile_group_service.ListSecurityProfilesResponse], + ]: + r"""Return a callable for the list security profiles method over gRPC. + + Lists SecurityProfiles in a given organization and + location. 
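+
+        With the async client the call is awaited and results come back as an
+        async pager. A minimal sketch with an illustrative parent::
+
+            pager = await client.list_security_profiles(
+                request=security_profile_group_service.ListSecurityProfilesRequest(
+                    parent="organizations/ORG_ID/locations/global",
+                )
+            )
+            async for profile in pager:
+                ...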
+ + Returns: + Callable[[~.ListSecurityProfilesRequest], + Awaitable[~.ListSecurityProfilesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_security_profiles" not in self._stubs: + self._stubs["list_security_profiles"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/ListSecurityProfiles", + request_serializer=security_profile_group_service.ListSecurityProfilesRequest.serialize, + response_deserializer=security_profile_group_service.ListSecurityProfilesResponse.deserialize, + ) + return self._stubs["list_security_profiles"] + + @property + def get_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.GetSecurityProfileRequest], + Awaitable[security_profile_group.SecurityProfile], + ]: + r"""Return a callable for the get security profile method over gRPC. + + Gets details of a single SecurityProfile. + + Returns: + Callable[[~.GetSecurityProfileRequest], + Awaitable[~.SecurityProfile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_security_profile" not in self._stubs: + self._stubs["get_security_profile"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/GetSecurityProfile", + request_serializer=security_profile_group_service.GetSecurityProfileRequest.serialize, + response_deserializer=security_profile_group.SecurityProfile.deserialize, + ) + return self._stubs["get_security_profile"] + + @property + def create_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.CreateSecurityProfileRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create security profile method over gRPC. + + Creates a new SecurityProfile in a given organization + and location. + + Returns: + Callable[[~.CreateSecurityProfileRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_security_profile" not in self._stubs: + self._stubs["create_security_profile"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/CreateSecurityProfile", + request_serializer=security_profile_group_service.CreateSecurityProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_security_profile"] + + @property + def update_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.UpdateSecurityProfileRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update security profile method over gRPC. + + Updates the parameters of a single SecurityProfile. + + Returns: + Callable[[~.UpdateSecurityProfileRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_security_profile" not in self._stubs: + self._stubs["update_security_profile"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/UpdateSecurityProfile", + request_serializer=security_profile_group_service.UpdateSecurityProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_security_profile"] + + @property + def delete_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.DeleteSecurityProfileRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete security profile method over gRPC. + + Deletes a single SecurityProfile. + + Returns: + Callable[[~.DeleteSecurityProfileRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_security_profile" not in self._stubs: + self._stubs["delete_security_profile"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService/DeleteSecurityProfile", + request_serializer=security_profile_group_service.DeleteSecurityProfileRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_security_profile"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_security_profile_groups: self._wrap_method( + self.list_security_profile_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_security_profile_group: self._wrap_method( + self.get_security_profile_group, + default_timeout=None, + client_info=client_info, + ), + self.create_security_profile_group: self._wrap_method( + self.create_security_profile_group, + default_timeout=None, + client_info=client_info, + ), + self.update_security_profile_group: self._wrap_method( + self.update_security_profile_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_security_profile_group: self._wrap_method( + self.delete_security_profile_group, + default_timeout=None, + client_info=client_info, + ), + self.list_security_profiles: self._wrap_method( + self.list_security_profiles, + default_timeout=None, + client_info=client_info, + ), + self.get_security_profile: self._wrap_method( + self.get_security_profile, + default_timeout=None, + client_info=client_info, + ), + self.create_security_profile: self._wrap_method( + self.create_security_profile, + default_timeout=None, + client_info=client_info, + ), + self.update_security_profile: self._wrap_method( + self.update_security_profile, + default_timeout=None, + client_info=client_info, + ), + self.delete_security_profile: self._wrap_method( + self.delete_security_profile, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + 
client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
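+        # A minimal usage sketch on the AsyncIO transport; ``transport`` and
+        # the operation name are placeholders.
+        #
+        #     operation = await transport.get_operation(
+        #         operations_pb2.GetOperationRequest(name="operations/1234")
+        #     )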
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+        Sets the IAM access control policy on the specified
+        function. Replaces any existing policy.
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
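+        # A minimal usage sketch; the resource name is a placeholder and
+        # ``transport`` stands for an instance of this class.
+        #
+        #     policy = await transport.set_iam_policy(
+        #         iam_policy_pb2.SetIamPolicyRequest(
+        #             resource="organizations/ORG_ID/locations/global/securityProfileGroups/my-group",
+        #             policy=policy_pb2.Policy(),
+        #         )
+        #     )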
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/rest.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/rest.py new file mode 100644 index 000000000000..468bb8e4dcfe --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/rest.py @@ -0,0 +1,3994 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group, + security_profile_group_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseOrganizationSecurityProfileGroupServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class OrganizationSecurityProfileGroupServiceRestInterceptor: + """Interceptor for OrganizationSecurityProfileGroupService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the OrganizationSecurityProfileGroupServiceRestTransport. + + .. 
code-block:: python + class MyCustomOrganizationSecurityProfileGroupServiceInterceptor(OrganizationSecurityProfileGroupServiceRestInterceptor): + def pre_create_security_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_security_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_security_profile_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_security_profile_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_security_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_security_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_security_profile_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_security_profile_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_security_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_security_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_security_profile_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_security_profile_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_security_profile_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_security_profile_groups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_security_profiles(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_security_profiles(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_security_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_security_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_security_profile_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_security_profile_group(self, response): + logging.log(f"Received response: {response}") + return response + + transport = OrganizationSecurityProfileGroupServiceRestTransport(interceptor=MyCustomOrganizationSecurityProfileGroupServiceInterceptor()) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + + """ + + def pre_create_security_profile( + self, + request: security_profile_group_service.CreateSecurityProfileRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.CreateSecurityProfileRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_security_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. 
+ """ + return request, metadata + + def post_create_security_profile( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_security_profile + + DEPRECATED. Please use the `post_create_security_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. This `post_create_security_profile` interceptor runs + before the `post_create_security_profile_with_metadata` interceptor. + """ + return response + + def post_create_security_profile_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_security_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationSecurityProfileGroupService server but before it is returned to user code. + + We recommend only using this `post_create_security_profile_with_metadata` + interceptor in new development instead of the `post_create_security_profile` interceptor. + When both interceptors are used, this `post_create_security_profile_with_metadata` interceptor runs after the + `post_create_security_profile` interceptor. The (possibly modified) response returned by + `post_create_security_profile` will be passed to + `post_create_security_profile_with_metadata`. + """ + return response, metadata + + def pre_create_security_profile_group( + self, + request: security_profile_group_service.CreateSecurityProfileGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.CreateSecurityProfileGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_security_profile_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_create_security_profile_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_security_profile_group + + DEPRECATED. Please use the `post_create_security_profile_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. This `post_create_security_profile_group` interceptor runs + before the `post_create_security_profile_group_with_metadata` interceptor. + """ + return response + + def post_create_security_profile_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_security_profile_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationSecurityProfileGroupService server but before it is returned to user code. + + We recommend only using this `post_create_security_profile_group_with_metadata` + interceptor in new development instead of the `post_create_security_profile_group` interceptor. 
+ When both interceptors are used, this `post_create_security_profile_group_with_metadata` interceptor runs after the + `post_create_security_profile_group` interceptor. The (possibly modified) response returned by + `post_create_security_profile_group` will be passed to + `post_create_security_profile_group_with_metadata`. + """ + return response, metadata + + def pre_delete_security_profile( + self, + request: security_profile_group_service.DeleteSecurityProfileRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.DeleteSecurityProfileRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_security_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_delete_security_profile( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_security_profile + + DEPRECATED. Please use the `post_delete_security_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. This `post_delete_security_profile` interceptor runs + before the `post_delete_security_profile_with_metadata` interceptor. + """ + return response + + def post_delete_security_profile_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_security_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationSecurityProfileGroupService server but before it is returned to user code. + + We recommend only using this `post_delete_security_profile_with_metadata` + interceptor in new development instead of the `post_delete_security_profile` interceptor. + When both interceptors are used, this `post_delete_security_profile_with_metadata` interceptor runs after the + `post_delete_security_profile` interceptor. The (possibly modified) response returned by + `post_delete_security_profile` will be passed to + `post_delete_security_profile_with_metadata`. + """ + return response, metadata + + def pre_delete_security_profile_group( + self, + request: security_profile_group_service.DeleteSecurityProfileGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.DeleteSecurityProfileGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_security_profile_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_delete_security_profile_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_security_profile_group + + DEPRECATED. Please use the `post_delete_security_profile_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. 
This `post_delete_security_profile_group` interceptor runs + before the `post_delete_security_profile_group_with_metadata` interceptor. + """ + return response + + def post_delete_security_profile_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_security_profile_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationSecurityProfileGroupService server but before it is returned to user code. + + We recommend only using this `post_delete_security_profile_group_with_metadata` + interceptor in new development instead of the `post_delete_security_profile_group` interceptor. + When both interceptors are used, this `post_delete_security_profile_group_with_metadata` interceptor runs after the + `post_delete_security_profile_group` interceptor. The (possibly modified) response returned by + `post_delete_security_profile_group` will be passed to + `post_delete_security_profile_group_with_metadata`. + """ + return response, metadata + + def pre_get_security_profile( + self, + request: security_profile_group_service.GetSecurityProfileRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.GetSecurityProfileRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_security_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_get_security_profile( + self, response: security_profile_group.SecurityProfile + ) -> security_profile_group.SecurityProfile: + """Post-rpc interceptor for get_security_profile + + DEPRECATED. Please use the `post_get_security_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. This `post_get_security_profile` interceptor runs + before the `post_get_security_profile_with_metadata` interceptor. + """ + return response + + def post_get_security_profile_with_metadata( + self, + response: security_profile_group.SecurityProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group.SecurityProfile, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_security_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationSecurityProfileGroupService server but before it is returned to user code. + + We recommend only using this `post_get_security_profile_with_metadata` + interceptor in new development instead of the `post_get_security_profile` interceptor. + When both interceptors are used, this `post_get_security_profile_with_metadata` interceptor runs after the + `post_get_security_profile` interceptor. The (possibly modified) response returned by + `post_get_security_profile` will be passed to + `post_get_security_profile_with_metadata`. 
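The pre/post hooks defined here are intended to be overridden in a subclass and handed to the REST transport, as the class docstring above sketches. A minimal, hedged wiring example follows; the import paths mirror this patch's package layout but are assumptions, and Application Default Credentials are assumed to be configured.

    # Editorial sketch, not part of the generated patch: a custom interceptor that
    # adds a request header and inspects response metadata for GetSecurityProfile.
    from google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service import (
        OrganizationSecurityProfileGroupServiceClient,
    )
    from google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.transports.rest import (
        OrganizationSecurityProfileGroupServiceRestInterceptor,
        OrganizationSecurityProfileGroupServiceRestTransport,
    )

    class AuditingInterceptor(OrganizationSecurityProfileGroupServiceRestInterceptor):
        def pre_get_security_profile(self, request, metadata):
            # Append an illustrative header to every GetSecurityProfile call.
            return request, list(metadata) + [("x-example-audit", "enabled")]

        def post_get_security_profile_with_metadata(self, response, metadata):
            # Inspect response headers before the SecurityProfile reaches user code.
            print("response header keys:", [key for key, _ in metadata])
            return response, metadata

    transport = OrganizationSecurityProfileGroupServiceRestTransport(
        interceptor=AuditingInterceptor()
    )
    client = OrganizationSecurityProfileGroupServiceClient(transport=transport)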
+ """ + return response, metadata + + def pre_get_security_profile_group( + self, + request: security_profile_group_service.GetSecurityProfileGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.GetSecurityProfileGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_security_profile_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_get_security_profile_group( + self, response: security_profile_group.SecurityProfileGroup + ) -> security_profile_group.SecurityProfileGroup: + """Post-rpc interceptor for get_security_profile_group + + DEPRECATED. Please use the `post_get_security_profile_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. This `post_get_security_profile_group` interceptor runs + before the `post_get_security_profile_group_with_metadata` interceptor. + """ + return response + + def post_get_security_profile_group_with_metadata( + self, + response: security_profile_group.SecurityProfileGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group.SecurityProfileGroup, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_security_profile_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationSecurityProfileGroupService server but before it is returned to user code. + + We recommend only using this `post_get_security_profile_group_with_metadata` + interceptor in new development instead of the `post_get_security_profile_group` interceptor. + When both interceptors are used, this `post_get_security_profile_group_with_metadata` interceptor runs after the + `post_get_security_profile_group` interceptor. The (possibly modified) response returned by + `post_get_security_profile_group` will be passed to + `post_get_security_profile_group_with_metadata`. + """ + return response, metadata + + def pre_list_security_profile_groups( + self, + request: security_profile_group_service.ListSecurityProfileGroupsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.ListSecurityProfileGroupsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_security_profile_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_list_security_profile_groups( + self, response: security_profile_group_service.ListSecurityProfileGroupsResponse + ) -> security_profile_group_service.ListSecurityProfileGroupsResponse: + """Post-rpc interceptor for list_security_profile_groups + + DEPRECATED. Please use the `post_list_security_profile_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. This `post_list_security_profile_groups` interceptor runs + before the `post_list_security_profile_groups_with_metadata` interceptor. 
+ """ + return response + + def post_list_security_profile_groups_with_metadata( + self, + response: security_profile_group_service.ListSecurityProfileGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.ListSecurityProfileGroupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_security_profile_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationSecurityProfileGroupService server but before it is returned to user code. + + We recommend only using this `post_list_security_profile_groups_with_metadata` + interceptor in new development instead of the `post_list_security_profile_groups` interceptor. + When both interceptors are used, this `post_list_security_profile_groups_with_metadata` interceptor runs after the + `post_list_security_profile_groups` interceptor. The (possibly modified) response returned by + `post_list_security_profile_groups` will be passed to + `post_list_security_profile_groups_with_metadata`. + """ + return response, metadata + + def pre_list_security_profiles( + self, + request: security_profile_group_service.ListSecurityProfilesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.ListSecurityProfilesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_security_profiles + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_list_security_profiles( + self, response: security_profile_group_service.ListSecurityProfilesResponse + ) -> security_profile_group_service.ListSecurityProfilesResponse: + """Post-rpc interceptor for list_security_profiles + + DEPRECATED. Please use the `post_list_security_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. This `post_list_security_profiles` interceptor runs + before the `post_list_security_profiles_with_metadata` interceptor. + """ + return response + + def post_list_security_profiles_with_metadata( + self, + response: security_profile_group_service.ListSecurityProfilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.ListSecurityProfilesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_security_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationSecurityProfileGroupService server but before it is returned to user code. + + We recommend only using this `post_list_security_profiles_with_metadata` + interceptor in new development instead of the `post_list_security_profiles` interceptor. + When both interceptors are used, this `post_list_security_profiles_with_metadata` interceptor runs after the + `post_list_security_profiles` interceptor. The (possibly modified) response returned by + `post_list_security_profiles` will be passed to + `post_list_security_profiles_with_metadata`. 
+ """ + return response, metadata + + def pre_update_security_profile( + self, + request: security_profile_group_service.UpdateSecurityProfileRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.UpdateSecurityProfileRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_security_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_update_security_profile( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_security_profile + + DEPRECATED. Please use the `post_update_security_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. This `post_update_security_profile` interceptor runs + before the `post_update_security_profile_with_metadata` interceptor. + """ + return response + + def post_update_security_profile_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_security_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationSecurityProfileGroupService server but before it is returned to user code. + + We recommend only using this `post_update_security_profile_with_metadata` + interceptor in new development instead of the `post_update_security_profile` interceptor. + When both interceptors are used, this `post_update_security_profile_with_metadata` interceptor runs after the + `post_update_security_profile` interceptor. The (possibly modified) response returned by + `post_update_security_profile` will be passed to + `post_update_security_profile_with_metadata`. + """ + return response, metadata + + def pre_update_security_profile_group( + self, + request: security_profile_group_service.UpdateSecurityProfileGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_profile_group_service.UpdateSecurityProfileGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_security_profile_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_update_security_profile_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_security_profile_group + + DEPRECATED. Please use the `post_update_security_profile_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. This `post_update_security_profile_group` interceptor runs + before the `post_update_security_profile_group_with_metadata` interceptor. 
+ """ + return response + + def post_update_security_profile_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_security_profile_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationSecurityProfileGroupService server but before it is returned to user code. + + We recommend only using this `post_update_security_profile_group_with_metadata` + interceptor in new development instead of the `post_update_security_profile_group` interceptor. + When both interceptors are used, this `post_update_security_profile_group_with_metadata` interceptor runs after the + `post_update_security_profile_group` interceptor. The (possibly modified) response returned by + `post_update_security_profile_group` will be passed to + `post_update_security_profile_group_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. 
+ """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationSecurityProfileGroupService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the OrganizationSecurityProfileGroupService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class OrganizationSecurityProfileGroupServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: OrganizationSecurityProfileGroupServiceRestInterceptor + + +class OrganizationSecurityProfileGroupServiceRestTransport( + _BaseOrganizationSecurityProfileGroupServiceRestTransport +): + """REST backend synchronous transport for OrganizationSecurityProfileGroupService. + + Organization SecurityProfileGroup is created under + organization. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ + OrganizationSecurityProfileGroupServiceRestInterceptor + ] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. 
A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = ( + interceptor or OrganizationSecurityProfileGroupServiceRestInterceptor() + ) + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateSecurityProfile( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfile, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.CreateSecurityProfile" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: security_profile_group_service.CreateSecurityProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create security profile method over HTTP. + + Args: + request (~.security_profile_group_service.CreateSecurityProfileRequest): + The request object. Request used by the + CreateSecurityProfile method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
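The ``http_options`` registered in ``operations_client`` above map the google.longrunning methods to both project- and organization-scoped URIs, and the same methods are exposed as mixins on the client. A hedged sketch of polling and cancelling an operation by name; the operation name is illustrative:

    # Editorial sketch, not part of the generated patch.
    from google.cloud import network_security_v1alpha1
    from google.longrunning import operations_pb2

    client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient(
        transport="rest"
    )

    op = client.get_operation(
        request=operations_pb2.GetOperationRequest(
            name="organizations/123456789/locations/global/operations/operation-123"
        )
    )
    if not op.done:
        # Cancellation is best-effort; the operation may still complete.
        client.cancel_operation(
            request=operations_pb2.CancelOperationRequest(name=op.name)
        )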
+ + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfile._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_security_profile( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfile._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfile._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfile._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.CreateSecurityProfile", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "CreateSecurityProfile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._CreateSecurityProfile._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_security_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_security_profile_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.create_security_profile", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "CreateSecurityProfile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateSecurityProfileGroup( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfileGroup, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.CreateSecurityProfileGroup" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: security_profile_group_service.CreateSecurityProfileGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create security profile + group method over HTTP. + + Args: + request (~.security_profile_group_service.CreateSecurityProfileGroupRequest): + The request object. Request used by the + CreateSecurityProfileGroup method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
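As the Returns block above notes, CreateSecurityProfileGroup yields a google.longrunning Operation; the generated client wraps it in an operation future that callers typically block on. A hedged sketch, where the flattened arguments and the ``description`` field are assumptions based on the standard create pattern:

    # Editorial sketch, not part of the generated patch.
    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient(
        transport="rest"
    )

    operation = client.create_security_profile_group(
        parent="organizations/123456789/locations/global",
        security_profile_group=network_security_v1alpha1.SecurityProfileGroup(
            description="example group"  # assumed field, for illustration only
        ),
        security_profile_group_id="example-spg",
    )
    result = operation.result()  # blocks until the long-running operation finishes
    print(result.name)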
+ + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfileGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_security_profile_group( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfileGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfileGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfileGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.CreateSecurityProfileGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "CreateSecurityProfileGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._CreateSecurityProfileGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_security_profile_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_security_profile_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.create_security_profile_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "CreateSecurityProfileGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteSecurityProfile( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteSecurityProfile, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.DeleteSecurityProfile" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: security_profile_group_service.DeleteSecurityProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete security profile method over HTTP. + + Args: + request (~.security_profile_group_service.DeleteSecurityProfileRequest): + The request object. Request used by the + DeleteSecurityProfile method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
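The DEBUG request/response logging used throughout these call bodies is emitted through this module's logger, and only when client logging is supported and DEBUG is enabled. A minimal sketch of turning it on with the standard logging module; the logger name is inferred from this module's import path:

    # Editorial sketch, not part of the generated patch.
    import logging

    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger(
        "google.cloud.network_security_v1alpha1.services."
        "organization_security_profile_group_service.transports.rest"
    ).setLevel(logging.DEBUG)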
+ + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteSecurityProfile._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_security_profile( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteSecurityProfile._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteSecurityProfile._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.DeleteSecurityProfile", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "DeleteSecurityProfile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._DeleteSecurityProfile._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_security_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_security_profile_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.delete_security_profile", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "DeleteSecurityProfile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteSecurityProfileGroup( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteSecurityProfileGroup, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.DeleteSecurityProfileGroup" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( 
+ "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: security_profile_group_service.DeleteSecurityProfileGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete security profile + group method over HTTP. + + Args: + request (~.security_profile_group_service.DeleteSecurityProfileGroupRequest): + The request object. Request used by the + DeleteSecurityProfileGroup method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteSecurityProfileGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_security_profile_group( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteSecurityProfileGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteSecurityProfileGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.DeleteSecurityProfileGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "DeleteSecurityProfileGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._DeleteSecurityProfileGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_security_profile_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_security_profile_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.delete_security_profile_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "DeleteSecurityProfileGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetSecurityProfile( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetSecurityProfile, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.GetSecurityProfile" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: security_profile_group_service.GetSecurityProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> security_profile_group.SecurityProfile: + r"""Call the get security profile method over HTTP. + + Args: + request (~.security_profile_group_service.GetSecurityProfileRequest): + The request object. Request used by the + GetSecurityProfile method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.security_profile_group.SecurityProfile: + SecurityProfile is a resource that + defines the behavior for one of many + ProfileTypes. 
+ + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetSecurityProfile._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_security_profile( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetSecurityProfile._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetSecurityProfile._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.GetSecurityProfile", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "GetSecurityProfile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._GetSecurityProfile._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = security_profile_group.SecurityProfile() + pb_resp = security_profile_group.SecurityProfile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_security_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_security_profile_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = security_profile_group.SecurityProfile.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.get_security_profile", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "GetSecurityProfile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetSecurityProfileGroup( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetSecurityProfileGroup, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.GetSecurityProfileGroup" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + 
headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: security_profile_group_service.GetSecurityProfileGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> security_profile_group.SecurityProfileGroup: + r"""Call the get security profile + group method over HTTP. + + Args: + request (~.security_profile_group_service.GetSecurityProfileGroupRequest): + The request object. Request used by the + GetSecurityProfileGroup method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.security_profile_group.SecurityProfileGroup: + SecurityProfileGroup is a resource + that defines the behavior for various + ProfileTypes. + + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetSecurityProfileGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_security_profile_group( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetSecurityProfileGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetSecurityProfileGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.GetSecurityProfileGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "GetSecurityProfileGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._GetSecurityProfileGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = security_profile_group.SecurityProfileGroup() + pb_resp = security_profile_group.SecurityProfileGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_security_profile_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_security_profile_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + security_profile_group.SecurityProfileGroup.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.get_security_profile_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "GetSecurityProfileGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSecurityProfileGroups( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListSecurityProfileGroups, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.ListSecurityProfileGroups" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: security_profile_group_service.ListSecurityProfileGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> security_profile_group_service.ListSecurityProfileGroupsResponse: + r"""Call the list security profile + groups method over HTTP. + + Args: + request (~.security_profile_group_service.ListSecurityProfileGroupsRequest): + The request object. Request used with the + ListSecurityProfileGroups method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.security_profile_group_service.ListSecurityProfileGroupsResponse: + Response returned by the + ListSecurityProfileGroups method. 
+ + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListSecurityProfileGroups._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_security_profile_groups( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListSecurityProfileGroups._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListSecurityProfileGroups._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.ListSecurityProfileGroups", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "ListSecurityProfileGroups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._ListSecurityProfileGroups._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = security_profile_group_service.ListSecurityProfileGroupsResponse() + pb_resp = ( + security_profile_group_service.ListSecurityProfileGroupsResponse.pb( + resp + ) + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_security_profile_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_security_profile_groups_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = security_profile_group_service.ListSecurityProfileGroupsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.list_security_profile_groups", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "ListSecurityProfileGroups", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSecurityProfiles( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListSecurityProfiles, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.ListSecurityProfiles" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + 
timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: security_profile_group_service.ListSecurityProfilesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> security_profile_group_service.ListSecurityProfilesResponse: + r"""Call the list security profiles method over HTTP. + + Args: + request (~.security_profile_group_service.ListSecurityProfilesRequest): + The request object. Request used with the + ListSecurityProfiles method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.security_profile_group_service.ListSecurityProfilesResponse: + Response returned by the + ListSecurityProfiles method. + + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListSecurityProfiles._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_security_profiles( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListSecurityProfiles._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListSecurityProfiles._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.ListSecurityProfiles", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "ListSecurityProfiles", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._ListSecurityProfiles._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = security_profile_group_service.ListSecurityProfilesResponse() + pb_resp = security_profile_group_service.ListSecurityProfilesResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_security_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_security_profiles_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = security_profile_group_service.ListSecurityProfilesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.list_security_profiles", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "ListSecurityProfiles", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateSecurityProfile( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfile, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.UpdateSecurityProfile" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: security_profile_group_service.UpdateSecurityProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update security profile method over HTTP. + + Args: + request (~.security_profile_group_service.UpdateSecurityProfileRequest): + The request object. Request used by the + UpdateSecurityProfile method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfile._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_security_profile( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfile._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfile._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfile._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.UpdateSecurityProfile", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "UpdateSecurityProfile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._UpdateSecurityProfile._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_security_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_security_profile_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.update_security_profile", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "UpdateSecurityProfile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateSecurityProfileGroup( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfileGroup, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.UpdateSecurityProfileGroup" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: security_profile_group_service.UpdateSecurityProfileGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update security profile + group method over HTTP. + + Args: + request (~.security_profile_group_service.UpdateSecurityProfileGroupRequest): + The request object. Request used by the + UpdateSecurityProfileGroup method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfileGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_security_profile_group( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfileGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfileGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfileGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.UpdateSecurityProfileGroup", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "UpdateSecurityProfileGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._UpdateSecurityProfileGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_security_profile_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_security_profile_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.update_security_profile_group", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "UpdateSecurityProfileGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.CreateSecurityProfileRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateSecurityProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.CreateSecurityProfileGroupRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSecurityProfileGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.DeleteSecurityProfileRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSecurityProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.DeleteSecurityProfileGroupRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSecurityProfileGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.GetSecurityProfileRequest], + security_profile_group.SecurityProfile, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSecurityProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.GetSecurityProfileGroupRequest], + security_profile_group.SecurityProfileGroup, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSecurityProfileGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_security_profile_groups( + self, + ) -> Callable[ + [security_profile_group_service.ListSecurityProfileGroupsRequest], + security_profile_group_service.ListSecurityProfileGroupsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSecurityProfileGroups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_security_profiles( + self, + ) -> Callable[ + [security_profile_group_service.ListSecurityProfilesRequest], + security_profile_group_service.ListSecurityProfilesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSecurityProfiles(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_security_profile( + self, + ) -> Callable[ + [security_profile_group_service.UpdateSecurityProfileRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateSecurityProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_security_profile_group( + self, + ) -> Callable[ + [security_profile_group_service.UpdateSecurityProfileGroupRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSecurityProfileGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetLocation, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.GetLocation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListLocations, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.ListLocations" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + 
request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetIamPolicy, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.GetIamPolicy" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. 
+ """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseSetIamPolicy, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.SetIamPolicy" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def 
__call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseTestIamPermissions, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.TestIamPermissions" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCancelOperation, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.CancelOperation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.CancelOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteOperation, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.DeleteOperation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetOperation, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.GetOperation" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListOperations, + OrganizationSecurityProfileGroupServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationSecurityProfileGroupServiceRestTransport.ListOperations" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = ( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationSecurityProfileGroupServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("OrganizationSecurityProfileGroupServiceRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/rest_base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/rest_base.py new file mode 100644 index 000000000000..6269bda59509 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/organization_security_profile_group_service/transports/rest_base.py @@ -0,0 +1,964 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group, + security_profile_group_service, +) + +from .base import DEFAULT_CLIENT_INFO, OrganizationSecurityProfileGroupServiceTransport + + +class _BaseOrganizationSecurityProfileGroupServiceRestTransport( + OrganizationSecurityProfileGroupServiceTransport +): + """Base REST backend transport for OrganizationSecurityProfileGroupService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateSecurityProfile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "securityProfileId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=organizations/*/locations/*}/securityProfiles", + "body": "security_profile", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = security_profile_group_service.CreateSecurityProfileRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfile._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateSecurityProfileGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "securityProfileGroupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=organizations/*/locations/*}/securityProfileGroups", + "body": "security_profile_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + security_profile_group_service.CreateSecurityProfileGroupRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseCreateSecurityProfileGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSecurityProfile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/securityProfiles/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = security_profile_group_service.DeleteSecurityProfileRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteSecurityProfile._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSecurityProfileGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/securityProfileGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + security_profile_group_service.DeleteSecurityProfileGroupRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseDeleteSecurityProfileGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSecurityProfile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/securityProfiles/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
security_profile_group_service.GetSecurityProfileRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetSecurityProfile._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSecurityProfileGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/securityProfileGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + security_profile_group_service.GetSecurityProfileGroupRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseGetSecurityProfileGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSecurityProfileGroups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=organizations/*/locations/*}/securityProfileGroups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + security_profile_group_service.ListSecurityProfileGroupsRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListSecurityProfileGroups._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSecurityProfiles: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=organizations/*/locations/*}/securityProfiles", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = security_profile_group_service.ListSecurityProfilesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseListSecurityProfiles._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateSecurityProfile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{security_profile.name=organizations/*/locations/*/securityProfiles/*}", + "body": "security_profile", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = security_profile_group_service.UpdateSecurityProfileRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfile._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateSecurityProfileGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{security_profile_group.name=organizations/*/locations/*/securityProfileGroups/*}", + "body": "security_profile_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + security_profile_group_service.UpdateSecurityProfileGroupRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + 
@staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationSecurityProfileGroupServiceRestTransport._BaseUpdateSecurityProfileGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": 
"post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseOrganizationSecurityProfileGroupServiceRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/__init__.py new file mode 100644 index 000000000000..54a8f32c51f4 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import SSEGatewayServiceAsyncClient +from .client import SSEGatewayServiceClient + +__all__ = ( + "SSEGatewayServiceClient", + "SSEGatewayServiceAsyncClient", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/async_client.py new file mode 100644 index 000000000000..e345c20513c9 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/async_client.py @@ -0,0 +1,1867 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.sse_gateway_service import pagers +from google.cloud.network_security_v1alpha1.types import common, sse_gateway + +from .client import SSEGatewayServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SSEGatewayServiceTransport +from .transports.grpc_asyncio import SSEGatewayServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED 
= True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class SSEGatewayServiceAsyncClient: + """Service describing handlers for resources""" + + _client: SSEGatewayServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = SSEGatewayServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SSEGatewayServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = SSEGatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = SSEGatewayServiceClient._DEFAULT_UNIVERSE + + partner_sse_gateway_path = staticmethod( + SSEGatewayServiceClient.partner_sse_gateway_path + ) + parse_partner_sse_gateway_path = staticmethod( + SSEGatewayServiceClient.parse_partner_sse_gateway_path + ) + sse_gateway_reference_path = staticmethod( + SSEGatewayServiceClient.sse_gateway_reference_path + ) + parse_sse_gateway_reference_path = staticmethod( + SSEGatewayServiceClient.parse_sse_gateway_reference_path + ) + common_billing_account_path = staticmethod( + SSEGatewayServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SSEGatewayServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SSEGatewayServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SSEGatewayServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SSEGatewayServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SSEGatewayServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SSEGatewayServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SSEGatewayServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(SSEGatewayServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SSEGatewayServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SSEGatewayServiceAsyncClient: The constructed client. + """ + return SSEGatewayServiceClient.from_service_account_info.__func__(SSEGatewayServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SSEGatewayServiceAsyncClient: The constructed client. + """ + return SSEGatewayServiceClient.from_service_account_file.__func__(SSEGatewayServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SSEGatewayServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SSEGatewayServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SSEGatewayServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = SSEGatewayServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + SSEGatewayServiceTransport, + Callable[..., SSEGatewayServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the sse gateway service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,SSEGatewayServiceTransport,Callable[..., SSEGatewayServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SSEGatewayServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SSEGatewayServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.SSEGatewayServiceAsyncClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "credentialsType": None, + }, + ) + + async def list_partner_sse_gateways( + self, + request: Optional[ + Union[sse_gateway.ListPartnerSSEGatewaysRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPartnerSSEGatewaysAsyncPager: + r"""Lists PartnerSSEGateways in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_partner_sse_gateways(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListPartnerSSEGatewaysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partner_sse_gateways(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysRequest, dict]]): + The request object. Message for requesting list of + PartnerSSEGateways + parent (:class:`str`): + Required. Parent value for + ListPartnerSSEGatewaysRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.sse_gateway_service.pagers.ListPartnerSSEGatewaysAsyncPager: + Message for response to listing + PartnerSSEGateways + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.ListPartnerSSEGatewaysRequest): + request = sse_gateway.ListPartnerSSEGatewaysRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_partner_sse_gateways + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListPartnerSSEGatewaysAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_partner_sse_gateway( + self, + request: Optional[Union[sse_gateway.GetPartnerSSEGatewayRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_gateway.PartnerSSEGateway: + r"""Gets details of a single PartnerSSEGateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetPartnerSSEGatewayRequest( + name="name_value", + ) + + # Make the request + response = await client.get_partner_sse_gateway(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetPartnerSSEGatewayRequest, dict]]): + The request object. Message for getting a + PartnerSSEGateway + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.PartnerSSEGateway: + Message describing PartnerSSEGateway + object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.GetPartnerSSEGatewayRequest): + request = sse_gateway.GetPartnerSSEGatewayRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_partner_sse_gateway + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_partner_sse_gateway( + self, + request: Optional[ + Union[sse_gateway.CreatePartnerSSEGatewayRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + partner_sse_gateway: Optional[sse_gateway.PartnerSSEGateway] = None, + partner_sse_gateway_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new PartnerSSEGateway in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + partner_sse_gateway = network_security_v1alpha1.PartnerSSEGateway() + partner_sse_gateway.sse_gateway_reference_id = "sse_gateway_reference_id_value" + + request = network_security_v1alpha1.CreatePartnerSSEGatewayRequest( + parent="parent_value", + partner_sse_gateway_id="partner_sse_gateway_id_value", + partner_sse_gateway=partner_sse_gateway, + ) + + # Make the request + operation = client.create_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreatePartnerSSEGatewayRequest, dict]]): + The request object. Message for creating a + PartnerSSEGateway + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partner_sse_gateway (:class:`google.cloud.network_security_v1alpha1.types.PartnerSSEGateway`): + Required. The resource being created + This corresponds to the ``partner_sse_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partner_sse_gateway_id (:class:`str`): + Required. Id of the requesting object If auto-generating + Id server-side, remove this field and + partner_sse_gateway_id from the method_signature of + Create RPC + + This corresponds to the ``partner_sse_gateway_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.PartnerSSEGateway` + Message describing PartnerSSEGateway object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, partner_sse_gateway, partner_sse_gateway_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.CreatePartnerSSEGatewayRequest): + request = sse_gateway.CreatePartnerSSEGatewayRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if partner_sse_gateway is not None: + request.partner_sse_gateway = partner_sse_gateway + if partner_sse_gateway_id is not None: + request.partner_sse_gateway_id = partner_sse_gateway_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_partner_sse_gateway + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + sse_gateway.PartnerSSEGateway, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_partner_sse_gateway( + self, + request: Optional[ + Union[sse_gateway.DeletePartnerSSEGatewayRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single PartnerSSEGateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeletePartnerSSEGatewayRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeletePartnerSSEGatewayRequest, dict]]): + The request object. Message for deleting a + PartnerSSEGateway + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.DeletePartnerSSEGatewayRequest): + request = sse_gateway.DeletePartnerSSEGatewayRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_partner_sse_gateway + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_partner_sse_gateway( + self, + request: Optional[ + Union[sse_gateway.UpdatePartnerSSEGatewayRequest, dict] + ] = None, + *, + partner_sse_gateway: Optional[sse_gateway.PartnerSSEGateway] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a single PartnerSSEGateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_update_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + partner_sse_gateway = network_security_v1alpha1.PartnerSSEGateway() + partner_sse_gateway.sse_gateway_reference_id = "sse_gateway_reference_id_value" + + request = network_security_v1alpha1.UpdatePartnerSSEGatewayRequest( + partner_sse_gateway=partner_sse_gateway, + ) + + # Make the request + operation = client.update_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.UpdatePartnerSSEGatewayRequest, dict]]): + The request object. Message for deleting a + PartnerSSEGateway + partner_sse_gateway (:class:`google.cloud.network_security_v1alpha1.types.PartnerSSEGateway`): + Required. The resource being created + This corresponds to the ``partner_sse_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to update + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.PartnerSSEGateway` + Message describing PartnerSSEGateway object + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [partner_sse_gateway, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.UpdatePartnerSSEGatewayRequest): + request = sse_gateway.UpdatePartnerSSEGatewayRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if partner_sse_gateway is not None: + request.partner_sse_gateway = partner_sse_gateway + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_partner_sse_gateway + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("partner_sse_gateway.name", request.partner_sse_gateway.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + sse_gateway.PartnerSSEGateway, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_sse_gateway_references( + self, + request: Optional[ + Union[sse_gateway.ListSSEGatewayReferencesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSSEGatewayReferencesAsyncPager: + r"""Lists SSEGatewayReferences in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_sse_gateway_references(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSSEGatewayReferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sse_gateway_references(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesRequest, dict]]): + The request object. Message for requesting list of + SSEGatewayReferences + parent (:class:`str`): + Required. 
Parent value for + ListSSEGatewayReferencesRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.sse_gateway_service.pagers.ListSSEGatewayReferencesAsyncPager: + Message for response to listing + SSEGatewayReferences + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.ListSSEGatewayReferencesRequest): + request = sse_gateway.ListSSEGatewayReferencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sse_gateway_references + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSSEGatewayReferencesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_sse_gateway_reference( + self, + request: Optional[ + Union[sse_gateway.GetSSEGatewayReferenceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_gateway.SSEGatewayReference: + r"""Gets details of a single SSEGatewayReference. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_sse_gateway_reference(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSSEGatewayReferenceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_sse_gateway_reference(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetSSEGatewayReferenceRequest, dict]]): + The request object. Message for getting a + SSEGatewayReference + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.SSEGatewayReference: + Message describing + SSEGatewayReference object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.GetSSEGatewayReferenceRequest): + request = sse_gateway.GetSSEGatewayReferenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_sse_gateway_reference + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. 
+ """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "SSEGatewayServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("SSEGatewayServiceAsyncClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/client.py new file mode 100644 index 000000000000..0d6dac32b8fd --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/client.py @@ -0,0 +1,2326 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.sse_gateway_service 
import pagers +from google.cloud.network_security_v1alpha1.types import common, sse_gateway + +from .transports.base import DEFAULT_CLIENT_INFO, SSEGatewayServiceTransport +from .transports.grpc import SSEGatewayServiceGrpcTransport +from .transports.grpc_asyncio import SSEGatewayServiceGrpcAsyncIOTransport +from .transports.rest import SSEGatewayServiceRestTransport + + +class SSEGatewayServiceClientMeta(type): + """Metaclass for the SSEGatewayService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[SSEGatewayServiceTransport]] + _transport_registry["grpc"] = SSEGatewayServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SSEGatewayServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SSEGatewayServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SSEGatewayServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SSEGatewayServiceClient(metaclass=SSEGatewayServiceClientMeta): + """Service describing handlers for resources""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "networksecurity.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "networksecurity.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SSEGatewayServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SSEGatewayServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SSEGatewayServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SSEGatewayServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def partner_sse_gateway_path( + project: str, + location: str, + partner_sse_gateway: str, + ) -> str: + """Returns a fully-qualified partner_sse_gateway string.""" + return "projects/{project}/locations/{location}/partnerSSEGateways/{partner_sse_gateway}".format( + project=project, + location=location, + partner_sse_gateway=partner_sse_gateway, + ) + + @staticmethod + def parse_partner_sse_gateway_path(path: str) -> Dict[str, str]: + """Parses a partner_sse_gateway path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/partnerSSEGateways/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def sse_gateway_reference_path( + project: str, + location: str, + sse_gateway_reference: str, + ) -> str: + """Returns a fully-qualified sse_gateway_reference string.""" + return "projects/{project}/locations/{location}/sseGatewayReferences/{sse_gateway_reference}".format( + project=project, + location=location, + sse_gateway_reference=sse_gateway_reference, + ) + + @staticmethod + def parse_sse_gateway_reference_path(path: str) -> Dict[str, str]: + """Parses a sse_gateway_reference path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/sseGatewayReferences/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: 
+ """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = SSEGatewayServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = SSEGatewayServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = SSEGatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SSEGatewayServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + SSEGatewayServiceTransport, + Callable[..., SSEGatewayServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the sse gateway service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,SSEGatewayServiceTransport,Callable[..., SSEGatewayServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SSEGatewayServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = SSEGatewayServiceClient._read_environment_variables() + self._client_cert_source = SSEGatewayServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = SSEGatewayServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. 
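A brief sketch of the constructor options described in the docstring above, assuming Application Default Credentials; the endpoint override value is a placeholder:

.. code-block:: python

    from google.api_core.client_options import ClientOptions

    from google.cloud import network_security_v1alpha1

    # Default construction: endpoint, mTLS and universe domain are resolved
    # from GOOGLE_API_USE_MTLS_ENDPOINT, GOOGLE_API_USE_CLIENT_CERTIFICATE
    # and GOOGLE_CLOUD_UNIVERSE_DOMAIN.
    client = network_security_v1alpha1.SSEGatewayServiceClient()

    # An explicit api_endpoint takes precedence over the environment-driven
    # endpoint resolution (placeholder endpoint shown).
    custom = network_security_v1alpha1.SSEGatewayServiceClient(
        client_options=ClientOptions(
            api_endpoint="networksecurity.example.googleapis.com"
        )
    )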
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, SSEGatewayServiceTransport) + if transport_provided: + # transport is a SSEGatewayServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(SSEGatewayServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or SSEGatewayServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[SSEGatewayServiceTransport], + Callable[..., SSEGatewayServiceTransport], + ] = ( + SSEGatewayServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SSEGatewayServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "credentialsType": None, + }, + ) + + def list_partner_sse_gateways( + self, + request: Optional[ + Union[sse_gateway.ListPartnerSSEGatewaysRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPartnerSSEGatewaysPager: + r"""Lists PartnerSSEGateways in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_partner_sse_gateways(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListPartnerSSEGatewaysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partner_sse_gateways(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysRequest, dict]): + The request object. Message for requesting list of + PartnerSSEGateways + parent (str): + Required. Parent value for + ListPartnerSSEGatewaysRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.sse_gateway_service.pagers.ListPartnerSSEGatewaysPager: + Message for response to listing + PartnerSSEGateways + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.ListPartnerSSEGatewaysRequest): + request = sse_gateway.ListPartnerSSEGatewaysRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_partner_sse_gateways + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPartnerSSEGatewaysPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
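The pager returned above also supports page-level iteration; a sketch with a placeholder parent (the repeated field name follows the standard ``List`` response shape):

.. code-block:: python

    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.SSEGatewayServiceClient()

    pager = client.list_partner_sse_gateways(
        parent="projects/my-project/locations/us-central1",
    )

    # Iterate page by page; additional pages are fetched lazily.
    for page in pager.pages:
        for gateway in page.partner_sse_gateways:
            print(gateway.name)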
+ return response + + def get_partner_sse_gateway( + self, + request: Optional[Union[sse_gateway.GetPartnerSSEGatewayRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_gateway.PartnerSSEGateway: + r"""Gets details of a single PartnerSSEGateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetPartnerSSEGatewayRequest( + name="name_value", + ) + + # Make the request + response = client.get_partner_sse_gateway(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetPartnerSSEGatewayRequest, dict]): + The request object. Message for getting a + PartnerSSEGateway + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.PartnerSSEGateway: + Message describing PartnerSSEGateway + object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.GetPartnerSSEGatewayRequest): + request = sse_gateway.GetPartnerSSEGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_partner_sse_gateway] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
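Besides the request-object form in the generated sample, the flattened ``name`` argument combines naturally with the path helper; IDs are placeholders:

.. code-block:: python

    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.SSEGatewayServiceClient()

    name = client.partner_sse_gateway_path("my-project", "us-central1", "my-gateway")

    # Pass the flattened `name` argument instead of a request object.
    gateway = client.get_partner_sse_gateway(name=name)
    print(gateway.name)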
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_partner_sse_gateway( + self, + request: Optional[ + Union[sse_gateway.CreatePartnerSSEGatewayRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + partner_sse_gateway: Optional[sse_gateway.PartnerSSEGateway] = None, + partner_sse_gateway_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new PartnerSSEGateway in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + partner_sse_gateway = network_security_v1alpha1.PartnerSSEGateway() + partner_sse_gateway.sse_gateway_reference_id = "sse_gateway_reference_id_value" + + request = network_security_v1alpha1.CreatePartnerSSEGatewayRequest( + parent="parent_value", + partner_sse_gateway_id="partner_sse_gateway_id_value", + partner_sse_gateway=partner_sse_gateway, + ) + + # Make the request + operation = client.create_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreatePartnerSSEGatewayRequest, dict]): + The request object. Message for creating a + PartnerSSEGateway + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partner_sse_gateway (google.cloud.network_security_v1alpha1.types.PartnerSSEGateway): + Required. The resource being created + This corresponds to the ``partner_sse_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partner_sse_gateway_id (str): + Required. Id of the requesting object If auto-generating + Id server-side, remove this field and + partner_sse_gateway_id from the method_signature of + Create RPC + + This corresponds to the ``partner_sse_gateway_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.PartnerSSEGateway` + Message describing PartnerSSEGateway object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, partner_sse_gateway, partner_sse_gateway_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.CreatePartnerSSEGatewayRequest): + request = sse_gateway.CreatePartnerSSEGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if partner_sse_gateway is not None: + request.partner_sse_gateway = partner_sse_gateway + if partner_sse_gateway_id is not None: + request.partner_sse_gateway_id = partner_sse_gateway_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_partner_sse_gateway + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + sse_gateway.PartnerSSEGateway, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_partner_sse_gateway( + self, + request: Optional[ + Union[sse_gateway.DeletePartnerSSEGatewayRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single PartnerSSEGateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeletePartnerSSEGatewayRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeletePartnerSSEGatewayRequest, dict]): + The request object. Message for deleting a + PartnerSSEGateway + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.DeletePartnerSSEGatewayRequest): + request = sse_gateway.DeletePartnerSSEGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_partner_sse_gateway + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
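Create, update and delete all return long-running operations; a sketch of waiting on a delete with an explicit timeout (the resource name is a placeholder):

.. code-block:: python

    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.SSEGatewayServiceClient()

    operation = client.delete_partner_sse_gateway(
        name="projects/my-project/locations/us-central1/partnerSSEGateways/my-gateway",
    )

    # Blocks until the operation finishes; raises on failure, or raises
    # concurrent.futures.TimeoutError if the deadline passes first.
    operation.result(timeout=300)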
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_partner_sse_gateway( + self, + request: Optional[ + Union[sse_gateway.UpdatePartnerSSEGatewayRequest, dict] + ] = None, + *, + partner_sse_gateway: Optional[sse_gateway.PartnerSSEGateway] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a single PartnerSSEGateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_update_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + partner_sse_gateway = network_security_v1alpha1.PartnerSSEGateway() + partner_sse_gateway.sse_gateway_reference_id = "sse_gateway_reference_id_value" + + request = network_security_v1alpha1.UpdatePartnerSSEGatewayRequest( + partner_sse_gateway=partner_sse_gateway, + ) + + # Make the request + operation = client.update_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.UpdatePartnerSSEGatewayRequest, dict]): + The request object. Message for deleting a + PartnerSSEGateway + partner_sse_gateway (google.cloud.network_security_v1alpha1.types.PartnerSSEGateway): + Required. The resource being created + This corresponds to the ``partner_sse_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.PartnerSSEGateway` + Message describing PartnerSSEGateway object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
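A sketch of the update call described above, restricting the change with a ``FieldMask``; the resource name is a placeholder and the masked field is only illustrative:

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import network_security_v1alpha1

    client = network_security_v1alpha1.SSEGatewayServiceClient()

    partner_sse_gateway = network_security_v1alpha1.PartnerSSEGateway(
        name="projects/my-project/locations/us-central1/partnerSSEGateways/my-gateway",
        sse_gateway_reference_id="sse_gateway_reference_id_value",
    )

    operation = client.update_partner_sse_gateway(
        partner_sse_gateway=partner_sse_gateway,
        # Only the fields listed in the mask are written.
        update_mask=field_mask_pb2.FieldMask(paths=["sse_gateway_reference_id"]),
    )
    response = operation.result()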
+ flattened_params = [partner_sse_gateway, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.UpdatePartnerSSEGatewayRequest): + request = sse_gateway.UpdatePartnerSSEGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if partner_sse_gateway is not None: + request.partner_sse_gateway = partner_sse_gateway + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_partner_sse_gateway + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("partner_sse_gateway.name", request.partner_sse_gateway.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + sse_gateway.PartnerSSEGateway, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_sse_gateway_references( + self, + request: Optional[ + Union[sse_gateway.ListSSEGatewayReferencesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSSEGatewayReferencesPager: + r"""Lists SSEGatewayReferences in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_sse_gateway_references(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSSEGatewayReferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sse_gateway_references(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesRequest, dict]): + The request object. Message for requesting list of + SSEGatewayReferences + parent (str): + Required. Parent value for + ListSSEGatewayReferencesRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.sse_gateway_service.pagers.ListSSEGatewayReferencesPager: + Message for response to listing + SSEGatewayReferences + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.ListSSEGatewayReferencesRequest): + request = sse_gateway.ListSSEGatewayReferencesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_sse_gateway_references + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSSEGatewayReferencesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_sse_gateway_reference( + self, + request: Optional[ + Union[sse_gateway.GetSSEGatewayReferenceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_gateway.SSEGatewayReference: + r"""Gets details of a single SSEGatewayReference. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_sse_gateway_reference(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSSEGatewayReferenceRequest( + name="name_value", + ) + + # Make the request + response = client.get_sse_gateway_reference(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetSSEGatewayReferenceRequest, dict]): + The request object. Message for getting a + SSEGatewayReference + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.SSEGatewayReference: + Message describing + SSEGatewayReference object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_gateway.GetSSEGatewayReferenceRequest): + request = sse_gateway.GetSSEGatewayReferenceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_sse_gateway_reference + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SSEGatewayServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. 
+ + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("SSEGatewayServiceClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/pagers.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/pagers.py new file mode 100644 index 000000000000..1b29424ff7d1 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/pagers.py @@ -0,0 +1,355 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.network_security_v1alpha1.types import sse_gateway + + +class ListPartnerSSEGatewaysPager: + """A pager for iterating through ``list_partner_sse_gateways`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysResponse` object, and + provides an ``__iter__`` method to iterate through its + ``partner_sse_gateways`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPartnerSSEGateways`` requests and continue to iterate + through the ``partner_sse_gateways`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., sse_gateway.ListPartnerSSEGatewaysResponse], + request: sse_gateway.ListPartnerSSEGatewaysRequest, + response: sse_gateway.ListPartnerSSEGatewaysResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = sse_gateway.ListPartnerSSEGatewaysRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[sse_gateway.ListPartnerSSEGatewaysResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[sse_gateway.PartnerSSEGateway]: + for page in self.pages: + yield from page.partner_sse_gateways + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPartnerSSEGatewaysAsyncPager: + """A pager for iterating through ``list_partner_sse_gateways`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``partner_sse_gateways`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPartnerSSEGateways`` requests and continue to iterate + through the ``partner_sse_gateways`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[sse_gateway.ListPartnerSSEGatewaysResponse]], + request: sse_gateway.ListPartnerSSEGatewaysRequest, + response: sse_gateway.ListPartnerSSEGatewaysResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = sse_gateway.ListPartnerSSEGatewaysRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[sse_gateway.ListPartnerSSEGatewaysResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[sse_gateway.PartnerSSEGateway]: + async def async_generator(): + async for page in self.pages: + for response in page.partner_sse_gateways: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSSEGatewayReferencesPager: + """A pager for iterating through ``list_sse_gateway_references`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sse_gateway_references`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSSEGatewayReferences`` requests and continue to iterate + through the ``sse_gateway_references`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
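+
+    A minimal iteration sketch (``client`` is assumed to be an existing
+    ``SSEGatewayServiceClient`` exposing the flattened ``parent`` argument;
+    the value is illustrative):
+
+    .. code-block:: python
+
+        pager = client.list_sse_gateway_references(parent="parent_value")
+        for reference in pager:
+            print(reference)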
+ """ + + def __init__( + self, + method: Callable[..., sse_gateway.ListSSEGatewayReferencesResponse], + request: sse_gateway.ListSSEGatewayReferencesRequest, + response: sse_gateway.ListSSEGatewayReferencesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = sse_gateway.ListSSEGatewayReferencesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[sse_gateway.ListSSEGatewayReferencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[sse_gateway.SSEGatewayReference]: + for page in self.pages: + yield from page.sse_gateway_references + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSSEGatewayReferencesAsyncPager: + """A pager for iterating through ``list_sse_gateway_references`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sse_gateway_references`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSSEGatewayReferences`` requests and continue to iterate + through the ``sse_gateway_references`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[sse_gateway.ListSSEGatewayReferencesResponse]], + request: sse_gateway.ListSSEGatewayReferencesRequest, + response: sse_gateway.ListSSEGatewayReferencesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = sse_gateway.ListSSEGatewayReferencesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[sse_gateway.ListSSEGatewayReferencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[sse_gateway.SSEGatewayReference]: + async def async_generator(): + async for page in self.pages: + for response in page.sse_gateway_references: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/README.rst b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/README.rst new file mode 100644 index 000000000000..f3648b917151 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`SSEGatewayServiceTransport` is the ABC for all transports. +- public child `SSEGatewayServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `SSEGatewayServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseSSEGatewayServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `SSEGatewayServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/__init__.py new file mode 100644 index 000000000000..db8312bd4493 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SSEGatewayServiceTransport +from .grpc import SSEGatewayServiceGrpcTransport +from .grpc_asyncio import SSEGatewayServiceGrpcAsyncIOTransport +from .rest import SSEGatewayServiceRestInterceptor, SSEGatewayServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SSEGatewayServiceTransport]] +_transport_registry["grpc"] = SSEGatewayServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SSEGatewayServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SSEGatewayServiceRestTransport + +__all__ = ( + "SSEGatewayServiceTransport", + "SSEGatewayServiceGrpcTransport", + "SSEGatewayServiceGrpcAsyncIOTransport", + "SSEGatewayServiceRestTransport", + "SSEGatewayServiceRestInterceptor", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/base.py new file mode 100644 index 000000000000..2c273441723a --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/base.py @@ -0,0 +1,396 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version +from google.cloud.network_security_v1alpha1.types import sse_gateway + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SSEGatewayServiceTransport(abc.ABC): + """Abstract transport class for SSEGatewayService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "networksecurity.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
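+        # Resolution order below: explicit ``credentials`` are used as-is,
+        # otherwise ``credentials_file`` (if given) is loaded, otherwise
+        # Application Default Credentials are obtained via ``google.auth.default()``.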
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_partner_sse_gateways: gapic_v1.method.wrap_method( + self.list_partner_sse_gateways, + default_timeout=None, + client_info=client_info, + ), + self.get_partner_sse_gateway: gapic_v1.method.wrap_method( + self.get_partner_sse_gateway, + default_timeout=None, + client_info=client_info, + ), + self.create_partner_sse_gateway: gapic_v1.method.wrap_method( + self.create_partner_sse_gateway, + default_timeout=None, + client_info=client_info, + ), + self.delete_partner_sse_gateway: gapic_v1.method.wrap_method( + self.delete_partner_sse_gateway, + default_timeout=None, + client_info=client_info, + ), + self.update_partner_sse_gateway: gapic_v1.method.wrap_method( + self.update_partner_sse_gateway, + default_timeout=None, + client_info=client_info, + ), + self.list_sse_gateway_references: gapic_v1.method.wrap_method( + self.list_sse_gateway_references, + default_timeout=None, + client_info=client_info, + ), + self.get_sse_gateway_reference: gapic_v1.method.wrap_method( + self.get_sse_gateway_reference, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: 
gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_partner_sse_gateways( + self, + ) -> Callable[ + [sse_gateway.ListPartnerSSEGatewaysRequest], + Union[ + sse_gateway.ListPartnerSSEGatewaysResponse, + Awaitable[sse_gateway.ListPartnerSSEGatewaysResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.GetPartnerSSEGatewayRequest], + Union[sse_gateway.PartnerSSEGateway, Awaitable[sse_gateway.PartnerSSEGateway]], + ]: + raise NotImplementedError() + + @property + def create_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.CreatePartnerSSEGatewayRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.DeletePartnerSSEGatewayRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.UpdatePartnerSSEGatewayRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_sse_gateway_references( + self, + ) -> Callable[ + [sse_gateway.ListSSEGatewayReferencesRequest], + Union[ + sse_gateway.ListSSEGatewayReferencesResponse, + Awaitable[sse_gateway.ListSSEGatewayReferencesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_sse_gateway_reference( + self, + ) -> Callable[ + [sse_gateway.GetSSEGatewayReferenceRequest], + Union[ + sse_gateway.SSEGatewayReference, Awaitable[sse_gateway.SSEGatewayReference] + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + 
Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SSEGatewayServiceTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/grpc.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/grpc.py new file mode 100644 index 000000000000..9f460d8717e6 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/grpc.py @@ -0,0 +1,748 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import sse_gateway + +from .base import DEFAULT_CLIENT_INFO, SSEGatewayServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = 
{ + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class SSEGatewayServiceGrpcTransport(SSEGatewayServiceTransport): + """gRPC backend transport for SSEGatewayService. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. 
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
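+                # (Both ``api_mtls_endpoint`` and ``client_cert_source`` are
+                # deprecated mTLS options; the preferred mechanism is
+                # ``client_cert_source_for_mtls``, handled in the else branch below.)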
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
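+
+        A minimal sketch (assumes Application Default Credentials are
+        configured in the environment):
+
+        .. code-block:: python
+
+            channel = SSEGatewayServiceGrpcTransport.create_channel()
+            transport = SSEGatewayServiceGrpcTransport(channel=channel)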
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_partner_sse_gateways( + self, + ) -> Callable[ + [sse_gateway.ListPartnerSSEGatewaysRequest], + sse_gateway.ListPartnerSSEGatewaysResponse, + ]: + r"""Return a callable for the list partner sse gateways method over gRPC. + + Lists PartnerSSEGateways in a given project and + location. + + Returns: + Callable[[~.ListPartnerSSEGatewaysRequest], + ~.ListPartnerSSEGatewaysResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_partner_sse_gateways" not in self._stubs: + self._stubs["list_partner_sse_gateways"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/ListPartnerSSEGateways", + request_serializer=sse_gateway.ListPartnerSSEGatewaysRequest.serialize, + response_deserializer=sse_gateway.ListPartnerSSEGatewaysResponse.deserialize, + ) + return self._stubs["list_partner_sse_gateways"] + + @property + def get_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.GetPartnerSSEGatewayRequest], sse_gateway.PartnerSSEGateway + ]: + r"""Return a callable for the get partner sse gateway method over gRPC. + + Gets details of a single PartnerSSEGateway. + + Returns: + Callable[[~.GetPartnerSSEGatewayRequest], + ~.PartnerSSEGateway]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_partner_sse_gateway" not in self._stubs: + self._stubs["get_partner_sse_gateway"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/GetPartnerSSEGateway", + request_serializer=sse_gateway.GetPartnerSSEGatewayRequest.serialize, + response_deserializer=sse_gateway.PartnerSSEGateway.deserialize, + ) + return self._stubs["get_partner_sse_gateway"] + + @property + def create_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.CreatePartnerSSEGatewayRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create partner sse gateway method over gRPC. + + Creates a new PartnerSSEGateway in a given project + and location. + + Returns: + Callable[[~.CreatePartnerSSEGatewayRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_partner_sse_gateway" not in self._stubs: + self._stubs[ + "create_partner_sse_gateway" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/CreatePartnerSSEGateway", + request_serializer=sse_gateway.CreatePartnerSSEGatewayRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_partner_sse_gateway"] + + @property + def delete_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.DeletePartnerSSEGatewayRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete partner sse gateway method over gRPC. + + Deletes a single PartnerSSEGateway. + + Returns: + Callable[[~.DeletePartnerSSEGatewayRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_partner_sse_gateway" not in self._stubs: + self._stubs[ + "delete_partner_sse_gateway" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/DeletePartnerSSEGateway", + request_serializer=sse_gateway.DeletePartnerSSEGatewayRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_partner_sse_gateway"] + + @property + def update_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.UpdatePartnerSSEGatewayRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update partner sse gateway method over gRPC. + + Updates a single PartnerSSEGateway. + + Returns: + Callable[[~.UpdatePartnerSSEGatewayRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_partner_sse_gateway" not in self._stubs: + self._stubs[ + "update_partner_sse_gateway" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/UpdatePartnerSSEGateway", + request_serializer=sse_gateway.UpdatePartnerSSEGatewayRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_partner_sse_gateway"] + + @property + def list_sse_gateway_references( + self, + ) -> Callable[ + [sse_gateway.ListSSEGatewayReferencesRequest], + sse_gateway.ListSSEGatewayReferencesResponse, + ]: + r"""Return a callable for the list sse gateway references method over gRPC. + + Lists SSEGatewayReferences in a given project and + location. + + Returns: + Callable[[~.ListSSEGatewayReferencesRequest], + ~.ListSSEGatewayReferencesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_sse_gateway_references" not in self._stubs: + self._stubs[ + "list_sse_gateway_references" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/ListSSEGatewayReferences", + request_serializer=sse_gateway.ListSSEGatewayReferencesRequest.serialize, + response_deserializer=sse_gateway.ListSSEGatewayReferencesResponse.deserialize, + ) + return self._stubs["list_sse_gateway_references"] + + @property + def get_sse_gateway_reference( + self, + ) -> Callable[ + [sse_gateway.GetSSEGatewayReferenceRequest], sse_gateway.SSEGatewayReference + ]: + r"""Return a callable for the get sse gateway reference method over gRPC. + + Gets details of a single SSEGatewayReference. + + Returns: + Callable[[~.GetSSEGatewayReferenceRequest], + ~.SSEGatewayReference]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_sse_gateway_reference" not in self._stubs: + self._stubs["get_sse_gateway_reference"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/GetSSEGatewayReference", + request_serializer=sse_gateway.GetSSEGatewayReferenceRequest.serialize, + response_deserializer=sse_gateway.SSEGatewayReference.deserialize, + ) + return self._stubs["get_sse_gateway_reference"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SSEGatewayServiceGrpcTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/grpc_asyncio.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..99efe15f2a83 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/grpc_asyncio.py @@ -0,0 +1,851 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import sse_gateway + +from .base import DEFAULT_CLIENT_INFO, SSEGatewayServiceTransport +from .grpc import SSEGatewayServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: 
{pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class SSEGatewayServiceGrpcAsyncIOTransport(SSEGatewayServiceTransport): + """gRPC AsyncIO backend transport for SSEGatewayService. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_partner_sse_gateways( + self, + ) -> Callable[ + [sse_gateway.ListPartnerSSEGatewaysRequest], + Awaitable[sse_gateway.ListPartnerSSEGatewaysResponse], + ]: + r"""Return a callable for the list partner sse gateways method over gRPC. + + Lists PartnerSSEGateways in a given project and + location. + + Returns: + Callable[[~.ListPartnerSSEGatewaysRequest], + Awaitable[~.ListPartnerSSEGatewaysResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
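The asyncio transport mirrors the synchronous one, but its stubs return awaitables. A sketch of driving it directly, again with request fields elided and application default credentials assumed:

.. code-block:: python

    import asyncio

    from google.cloud.network_security_v1alpha1.services.sse_gateway_service.transports.grpc_asyncio import (
        SSEGatewayServiceGrpcAsyncIOTransport,
    )
    from google.cloud.network_security_v1alpha1.types import sse_gateway

    async def main() -> None:
        transport = SSEGatewayServiceGrpcAsyncIOTransport()
        try:
            response = await transport.list_partner_sse_gateways(
                sse_gateway.ListPartnerSSEGatewaysRequest()
            )
            print(response)
        finally:
            await transport.close()  # aio channels are closed asynchronously

    asyncio.run(main())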
+ if "list_partner_sse_gateways" not in self._stubs: + self._stubs["list_partner_sse_gateways"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/ListPartnerSSEGateways", + request_serializer=sse_gateway.ListPartnerSSEGatewaysRequest.serialize, + response_deserializer=sse_gateway.ListPartnerSSEGatewaysResponse.deserialize, + ) + return self._stubs["list_partner_sse_gateways"] + + @property + def get_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.GetPartnerSSEGatewayRequest], + Awaitable[sse_gateway.PartnerSSEGateway], + ]: + r"""Return a callable for the get partner sse gateway method over gRPC. + + Gets details of a single PartnerSSEGateway. + + Returns: + Callable[[~.GetPartnerSSEGatewayRequest], + Awaitable[~.PartnerSSEGateway]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_partner_sse_gateway" not in self._stubs: + self._stubs["get_partner_sse_gateway"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/GetPartnerSSEGateway", + request_serializer=sse_gateway.GetPartnerSSEGatewayRequest.serialize, + response_deserializer=sse_gateway.PartnerSSEGateway.deserialize, + ) + return self._stubs["get_partner_sse_gateway"] + + @property + def create_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.CreatePartnerSSEGatewayRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create partner sse gateway method over gRPC. + + Creates a new PartnerSSEGateway in a given project + and location. + + Returns: + Callable[[~.CreatePartnerSSEGatewayRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_partner_sse_gateway" not in self._stubs: + self._stubs[ + "create_partner_sse_gateway" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/CreatePartnerSSEGateway", + request_serializer=sse_gateway.CreatePartnerSSEGatewayRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_partner_sse_gateway"] + + @property + def delete_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.DeletePartnerSSEGatewayRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete partner sse gateway method over gRPC. + + Deletes a single PartnerSSEGateway. + + Returns: + Callable[[~.DeletePartnerSSEGatewayRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_partner_sse_gateway" not in self._stubs: + self._stubs[ + "delete_partner_sse_gateway" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/DeletePartnerSSEGateway", + request_serializer=sse_gateway.DeletePartnerSSEGatewayRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_partner_sse_gateway"] + + @property + def update_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.UpdatePartnerSSEGatewayRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update partner sse gateway method over gRPC. + + Updates a single PartnerSSEGateway. + + Returns: + Callable[[~.UpdatePartnerSSEGatewayRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_partner_sse_gateway" not in self._stubs: + self._stubs[ + "update_partner_sse_gateway" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/UpdatePartnerSSEGateway", + request_serializer=sse_gateway.UpdatePartnerSSEGatewayRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_partner_sse_gateway"] + + @property + def list_sse_gateway_references( + self, + ) -> Callable[ + [sse_gateway.ListSSEGatewayReferencesRequest], + Awaitable[sse_gateway.ListSSEGatewayReferencesResponse], + ]: + r"""Return a callable for the list sse gateway references method over gRPC. + + Lists SSEGatewayReferences in a given project and + location. + + Returns: + Callable[[~.ListSSEGatewayReferencesRequest], + Awaitable[~.ListSSEGatewayReferencesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_sse_gateway_references" not in self._stubs: + self._stubs[ + "list_sse_gateway_references" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/ListSSEGatewayReferences", + request_serializer=sse_gateway.ListSSEGatewayReferencesRequest.serialize, + response_deserializer=sse_gateway.ListSSEGatewayReferencesResponse.deserialize, + ) + return self._stubs["list_sse_gateway_references"] + + @property + def get_sse_gateway_reference( + self, + ) -> Callable[ + [sse_gateway.GetSSEGatewayReferenceRequest], + Awaitable[sse_gateway.SSEGatewayReference], + ]: + r"""Return a callable for the get sse gateway reference method over gRPC. + + Gets details of a single SSEGatewayReference. + + Returns: + Callable[[~.GetSSEGatewayReferenceRequest], + Awaitable[~.SSEGatewayReference]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_sse_gateway_reference" not in self._stubs: + self._stubs["get_sse_gateway_reference"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSEGatewayService/GetSSEGatewayReference", + request_serializer=sse_gateway.GetSSEGatewayReferenceRequest.serialize, + response_deserializer=sse_gateway.SSEGatewayReference.deserialize, + ) + return self._stubs["get_sse_gateway_reference"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_partner_sse_gateways: self._wrap_method( + self.list_partner_sse_gateways, + default_timeout=None, + client_info=client_info, + ), + self.get_partner_sse_gateway: self._wrap_method( + self.get_partner_sse_gateway, + default_timeout=None, + client_info=client_info, + ), + self.create_partner_sse_gateway: self._wrap_method( + self.create_partner_sse_gateway, + default_timeout=None, + client_info=client_info, + ), + self.delete_partner_sse_gateway: self._wrap_method( + self.delete_partner_sse_gateway, + default_timeout=None, + client_info=client_info, + ), + self.update_partner_sse_gateway: self._wrap_method( + self.update_partner_sse_gateway, + default_timeout=None, + client_info=client_info, + ), + self.list_sse_gateway_references: self._wrap_method( + self.list_sse_gateway_references, + default_timeout=None, + client_info=client_info, + ), + self.get_sse_gateway_reference: self._wrap_method( + self.get_sse_gateway_reference, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("SSEGatewayServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/rest.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/rest.py new file mode 100644 index 000000000000..3ddcd4987120 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/rest.py @@ -0,0 +1,3262 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.network_security_v1alpha1.types import sse_gateway + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseSSEGatewayServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SSEGatewayServiceRestInterceptor: + """Interceptor for SSEGatewayService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SSEGatewayServiceRestTransport. + + .. code-block:: python + class MyCustomSSEGatewayServiceInterceptor(SSEGatewayServiceRestInterceptor): + def pre_create_partner_sse_gateway(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_partner_sse_gateway(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_partner_sse_gateway(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_partner_sse_gateway(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_partner_sse_gateway(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_partner_sse_gateway(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_sse_gateway_reference(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_sse_gateway_reference(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_partner_sse_gateways(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_partner_sse_gateways(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_sse_gateway_references(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_sse_gateway_references(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_partner_sse_gateway(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_partner_sse_gateway(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SSEGatewayServiceRestTransport(interceptor=MyCustomSSEGatewayServiceInterceptor()) + client = SSEGatewayServiceClient(transport=transport) + + + """ + + def pre_create_partner_sse_gateway( + self, + request: sse_gateway.CreatePartnerSSEGatewayRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_gateway.CreatePartnerSSEGatewayRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_partner_sse_gateway + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_create_partner_sse_gateway( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_partner_sse_gateway + + DEPRECATED. Please use the `post_create_partner_sse_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. This `post_create_partner_sse_gateway` interceptor runs + before the `post_create_partner_sse_gateway_with_metadata` interceptor. 
+ """ + return response + + def post_create_partner_sse_gateway_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_partner_sse_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSEGatewayService server but before it is returned to user code. + + We recommend only using this `post_create_partner_sse_gateway_with_metadata` + interceptor in new development instead of the `post_create_partner_sse_gateway` interceptor. + When both interceptors are used, this `post_create_partner_sse_gateway_with_metadata` interceptor runs after the + `post_create_partner_sse_gateway` interceptor. The (possibly modified) response returned by + `post_create_partner_sse_gateway` will be passed to + `post_create_partner_sse_gateway_with_metadata`. + """ + return response, metadata + + def pre_delete_partner_sse_gateway( + self, + request: sse_gateway.DeletePartnerSSEGatewayRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_gateway.DeletePartnerSSEGatewayRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_partner_sse_gateway + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_delete_partner_sse_gateway( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_partner_sse_gateway + + DEPRECATED. Please use the `post_delete_partner_sse_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. This `post_delete_partner_sse_gateway` interceptor runs + before the `post_delete_partner_sse_gateway_with_metadata` interceptor. + """ + return response + + def post_delete_partner_sse_gateway_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_partner_sse_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSEGatewayService server but before it is returned to user code. + + We recommend only using this `post_delete_partner_sse_gateway_with_metadata` + interceptor in new development instead of the `post_delete_partner_sse_gateway` interceptor. + When both interceptors are used, this `post_delete_partner_sse_gateway_with_metadata` interceptor runs after the + `post_delete_partner_sse_gateway` interceptor. The (possibly modified) response returned by + `post_delete_partner_sse_gateway` will be passed to + `post_delete_partner_sse_gateway_with_metadata`. + """ + return response, metadata + + def pre_get_partner_sse_gateway( + self, + request: sse_gateway.GetPartnerSSEGatewayRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_gateway.GetPartnerSSEGatewayRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_partner_sse_gateway + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. 
+ """ + return request, metadata + + def post_get_partner_sse_gateway( + self, response: sse_gateway.PartnerSSEGateway + ) -> sse_gateway.PartnerSSEGateway: + """Post-rpc interceptor for get_partner_sse_gateway + + DEPRECATED. Please use the `post_get_partner_sse_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. This `post_get_partner_sse_gateway` interceptor runs + before the `post_get_partner_sse_gateway_with_metadata` interceptor. + """ + return response + + def post_get_partner_sse_gateway_with_metadata( + self, + response: sse_gateway.PartnerSSEGateway, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sse_gateway.PartnerSSEGateway, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_partner_sse_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSEGatewayService server but before it is returned to user code. + + We recommend only using this `post_get_partner_sse_gateway_with_metadata` + interceptor in new development instead of the `post_get_partner_sse_gateway` interceptor. + When both interceptors are used, this `post_get_partner_sse_gateway_with_metadata` interceptor runs after the + `post_get_partner_sse_gateway` interceptor. The (possibly modified) response returned by + `post_get_partner_sse_gateway` will be passed to + `post_get_partner_sse_gateway_with_metadata`. + """ + return response, metadata + + def pre_get_sse_gateway_reference( + self, + request: sse_gateway.GetSSEGatewayReferenceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_gateway.GetSSEGatewayReferenceRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_sse_gateway_reference + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_get_sse_gateway_reference( + self, response: sse_gateway.SSEGatewayReference + ) -> sse_gateway.SSEGatewayReference: + """Post-rpc interceptor for get_sse_gateway_reference + + DEPRECATED. Please use the `post_get_sse_gateway_reference_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. This `post_get_sse_gateway_reference` interceptor runs + before the `post_get_sse_gateway_reference_with_metadata` interceptor. + """ + return response + + def post_get_sse_gateway_reference_with_metadata( + self, + response: sse_gateway.SSEGatewayReference, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_gateway.SSEGatewayReference, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_sse_gateway_reference + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSEGatewayService server but before it is returned to user code. + + We recommend only using this `post_get_sse_gateway_reference_with_metadata` + interceptor in new development instead of the `post_get_sse_gateway_reference` interceptor. + When both interceptors are used, this `post_get_sse_gateway_reference_with_metadata` interceptor runs after the + `post_get_sse_gateway_reference` interceptor. 
The (possibly modified) response returned by + `post_get_sse_gateway_reference` will be passed to + `post_get_sse_gateway_reference_with_metadata`. + """ + return response, metadata + + def pre_list_partner_sse_gateways( + self, + request: sse_gateway.ListPartnerSSEGatewaysRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_gateway.ListPartnerSSEGatewaysRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_partner_sse_gateways + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_list_partner_sse_gateways( + self, response: sse_gateway.ListPartnerSSEGatewaysResponse + ) -> sse_gateway.ListPartnerSSEGatewaysResponse: + """Post-rpc interceptor for list_partner_sse_gateways + + DEPRECATED. Please use the `post_list_partner_sse_gateways_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. This `post_list_partner_sse_gateways` interceptor runs + before the `post_list_partner_sse_gateways_with_metadata` interceptor. + """ + return response + + def post_list_partner_sse_gateways_with_metadata( + self, + response: sse_gateway.ListPartnerSSEGatewaysResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_gateway.ListPartnerSSEGatewaysResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_partner_sse_gateways + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSEGatewayService server but before it is returned to user code. + + We recommend only using this `post_list_partner_sse_gateways_with_metadata` + interceptor in new development instead of the `post_list_partner_sse_gateways` interceptor. + When both interceptors are used, this `post_list_partner_sse_gateways_with_metadata` interceptor runs after the + `post_list_partner_sse_gateways` interceptor. The (possibly modified) response returned by + `post_list_partner_sse_gateways` will be passed to + `post_list_partner_sse_gateways_with_metadata`. + """ + return response, metadata + + def pre_list_sse_gateway_references( + self, + request: sse_gateway.ListSSEGatewayReferencesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_gateway.ListSSEGatewayReferencesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_sse_gateway_references + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_list_sse_gateway_references( + self, response: sse_gateway.ListSSEGatewayReferencesResponse + ) -> sse_gateway.ListSSEGatewayReferencesResponse: + """Post-rpc interceptor for list_sse_gateway_references + + DEPRECATED. Please use the `post_list_sse_gateway_references_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. This `post_list_sse_gateway_references` interceptor runs + before the `post_list_sse_gateway_references_with_metadata` interceptor. 
+ """ + return response + + def post_list_sse_gateway_references_with_metadata( + self, + response: sse_gateway.ListSSEGatewayReferencesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_gateway.ListSSEGatewayReferencesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_sse_gateway_references + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSEGatewayService server but before it is returned to user code. + + We recommend only using this `post_list_sse_gateway_references_with_metadata` + interceptor in new development instead of the `post_list_sse_gateway_references` interceptor. + When both interceptors are used, this `post_list_sse_gateway_references_with_metadata` interceptor runs after the + `post_list_sse_gateway_references` interceptor. The (possibly modified) response returned by + `post_list_sse_gateway_references` will be passed to + `post_list_sse_gateway_references_with_metadata`. + """ + return response, metadata + + def pre_update_partner_sse_gateway( + self, + request: sse_gateway.UpdatePartnerSSEGatewayRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_gateway.UpdatePartnerSSEGatewayRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_partner_sse_gateway + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_update_partner_sse_gateway( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_partner_sse_gateway + + DEPRECATED. Please use the `post_update_partner_sse_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. This `post_update_partner_sse_gateway` interceptor runs + before the `post_update_partner_sse_gateway_with_metadata` interceptor. + """ + return response + + def post_update_partner_sse_gateway_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_partner_sse_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSEGatewayService server but before it is returned to user code. + + We recommend only using this `post_update_partner_sse_gateway_with_metadata` + interceptor in new development instead of the `post_update_partner_sse_gateway` interceptor. + When both interceptors are used, this `post_update_partner_sse_gateway_with_metadata` interceptor runs after the + `post_update_partner_sse_gateway` interceptor. The (possibly modified) response returned by + `post_update_partner_sse_gateway` will be passed to + `post_update_partner_sse_gateway_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSEGatewayService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SSEGatewayService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SSEGatewayServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SSEGatewayServiceRestInterceptor + + +class SSEGatewayServiceRestTransport(_BaseSSEGatewayServiceRestTransport): + """REST backend synchronous transport for SSEGatewayService. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SSEGatewayServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SSEGatewayServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
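+        # Note: the HTTP rules assembled below mirror the v1alpha1 REST bindings
+        # for the google.longrunning.Operations mixin, with one URI each for
+        # project-scoped and organization-scoped operation names.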
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreatePartnerSSEGateway( + _BaseSSEGatewayServiceRestTransport._BaseCreatePartnerSSEGateway, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.CreatePartnerSSEGateway") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: sse_gateway.CreatePartnerSSEGatewayRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create partner sse + gateway method over HTTP. + + Args: + request (~.sse_gateway.CreatePartnerSSEGatewayRequest): + The request object. Message for creating a + PartnerSSEGateway + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseCreatePartnerSSEGateway._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_partner_sse_gateway( + request, metadata + ) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseCreatePartnerSSEGateway._get_transcoded_request( + http_options, request + ) + + body = _BaseSSEGatewayServiceRestTransport._BaseCreatePartnerSSEGateway._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseCreatePartnerSSEGateway._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.CreatePartnerSSEGateway", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "CreatePartnerSSEGateway", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + SSEGatewayServiceRestTransport._CreatePartnerSSEGateway._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_partner_sse_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_partner_sse_gateway_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.create_partner_sse_gateway", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "CreatePartnerSSEGateway", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeletePartnerSSEGateway( + _BaseSSEGatewayServiceRestTransport._BaseDeletePartnerSSEGateway, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.DeletePartnerSSEGateway") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_gateway.DeletePartnerSSEGatewayRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete partner sse + gateway method over HTTP. + + Args: + request (~.sse_gateway.DeletePartnerSSEGatewayRequest): + The request object. Message for deleting a + PartnerSSEGateway + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseDeletePartnerSSEGateway._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_partner_sse_gateway( + request, metadata + ) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseDeletePartnerSSEGateway._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseDeletePartnerSSEGateway._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.DeletePartnerSSEGateway", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "DeletePartnerSSEGateway", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + SSEGatewayServiceRestTransport._DeletePartnerSSEGateway._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
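+            # Note: core_exceptions.from_http_response inspects the status code and
+            # error payload and returns the matching GoogleAPICallError subclass
+            # (for example, NotFound for a 404), which is raised below.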
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_partner_sse_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_partner_sse_gateway_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.delete_partner_sse_gateway", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "DeletePartnerSSEGateway", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetPartnerSSEGateway( + _BaseSSEGatewayServiceRestTransport._BaseGetPartnerSSEGateway, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.GetPartnerSSEGateway") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_gateway.GetPartnerSSEGatewayRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_gateway.PartnerSSEGateway: + r"""Call the get partner sse gateway method over HTTP. + + Args: + request (~.sse_gateway.GetPartnerSSEGatewayRequest): + The request object. Message for getting a + PartnerSSEGateway + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.sse_gateway.PartnerSSEGateway: + Message describing PartnerSSEGateway + object + + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseGetPartnerSSEGateway._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_partner_sse_gateway( + request, metadata + ) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseGetPartnerSSEGateway._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseGetPartnerSSEGateway._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.GetPartnerSSEGateway", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "GetPartnerSSEGateway", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + SSEGatewayServiceRestTransport._GetPartnerSSEGateway._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = sse_gateway.PartnerSSEGateway() + pb_resp = sse_gateway.PartnerSSEGateway.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_partner_sse_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_partner_sse_gateway_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = sse_gateway.PartnerSSEGateway.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.get_partner_sse_gateway", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "GetPartnerSSEGateway", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetSSEGatewayReference( + _BaseSSEGatewayServiceRestTransport._BaseGetSSEGatewayReference, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.GetSSEGatewayReference") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_gateway.GetSSEGatewayReferenceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_gateway.SSEGatewayReference: + r"""Call the get sse gateway reference method over HTTP. + + Args: + request (~.sse_gateway.GetSSEGatewayReferenceRequest): + The request object. Message for getting a + SSEGatewayReference + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.sse_gateway.SSEGatewayReference: + Message describing + SSEGatewayReference object + + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseGetSSEGatewayReference._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_sse_gateway_reference( + request, metadata + ) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseGetSSEGatewayReference._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseGetSSEGatewayReference._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.GetSSEGatewayReference", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "GetSSEGatewayReference", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + SSEGatewayServiceRestTransport._GetSSEGatewayReference._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
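+            # Note on the parsing below: SSEGatewayReference.pb(resp) exposes the
+            # underlying protobuf message of the proto-plus wrapper, so parsing the
+            # JSON body into pb_resp populates resp in place.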
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = sse_gateway.SSEGatewayReference() + pb_resp = sse_gateway.SSEGatewayReference.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_sse_gateway_reference(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_sse_gateway_reference_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = sse_gateway.SSEGatewayReference.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.get_sse_gateway_reference", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "GetSSEGatewayReference", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListPartnerSSEGateways( + _BaseSSEGatewayServiceRestTransport._BaseListPartnerSSEGateways, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.ListPartnerSSEGateways") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_gateway.ListPartnerSSEGatewaysRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_gateway.ListPartnerSSEGatewaysResponse: + r"""Call the list partner sse gateways method over HTTP. + + Args: + request (~.sse_gateway.ListPartnerSSEGatewaysRequest): + The request object. Message for requesting list of + PartnerSSEGateways + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.sse_gateway.ListPartnerSSEGatewaysResponse: + Message for response to listing + PartnerSSEGateways + + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseListPartnerSSEGateways._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_partner_sse_gateways( + request, metadata + ) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseListPartnerSSEGateways._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseListPartnerSSEGateways._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.ListPartnerSSEGateways", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "ListPartnerSSEGateways", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + SSEGatewayServiceRestTransport._ListPartnerSSEGateways._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = sse_gateway.ListPartnerSSEGatewaysResponse() + pb_resp = sse_gateway.ListPartnerSSEGatewaysResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_partner_sse_gateways(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_partner_sse_gateways_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + sse_gateway.ListPartnerSSEGatewaysResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.list_partner_sse_gateways", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "ListPartnerSSEGateways", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSSEGatewayReferences( + _BaseSSEGatewayServiceRestTransport._BaseListSSEGatewayReferences, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.ListSSEGatewayReferences") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, 
method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_gateway.ListSSEGatewayReferencesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_gateway.ListSSEGatewayReferencesResponse: + r"""Call the list sse gateway + references method over HTTP. + + Args: + request (~.sse_gateway.ListSSEGatewayReferencesRequest): + The request object. Message for requesting list of + SSEGatewayReferences + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.sse_gateway.ListSSEGatewayReferencesResponse: + Message for response to listing + SSEGatewayReferences + + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseListSSEGatewayReferences._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_sse_gateway_references( + request, metadata + ) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseListSSEGatewayReferences._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseListSSEGatewayReferences._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.ListSSEGatewayReferences", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "ListSSEGatewayReferences", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + SSEGatewayServiceRestTransport._ListSSEGatewayReferences._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
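+            # Note: the transport returns the raw ListSSEGatewayReferencesResponse;
+            # iteration over next_page_token, if any, is expected to be handled at
+            # the service client layer rather than here.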
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = sse_gateway.ListSSEGatewayReferencesResponse() + pb_resp = sse_gateway.ListSSEGatewayReferencesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_sse_gateway_references(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sse_gateway_references_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + sse_gateway.ListSSEGatewayReferencesResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.list_sse_gateway_references", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "ListSSEGatewayReferences", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdatePartnerSSEGateway( + _BaseSSEGatewayServiceRestTransport._BaseUpdatePartnerSSEGateway, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.UpdatePartnerSSEGateway") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: sse_gateway.UpdatePartnerSSEGatewayRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update partner sse + gateway method over HTTP. + + Args: + request (~.sse_gateway.UpdatePartnerSSEGatewayRequest): + The request object. Message for deleting a + PartnerSSEGateway + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseUpdatePartnerSSEGateway._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_partner_sse_gateway( + request, metadata + ) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseUpdatePartnerSSEGateway._get_transcoded_request( + http_options, request + ) + + body = _BaseSSEGatewayServiceRestTransport._BaseUpdatePartnerSSEGateway._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseUpdatePartnerSSEGateway._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.UpdatePartnerSSEGateway", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "UpdatePartnerSSEGateway", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + SSEGatewayServiceRestTransport._UpdatePartnerSSEGateway._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_partner_sse_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_partner_sse_gateway_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.update_partner_sse_gateway", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "UpdatePartnerSSEGateway", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.CreatePartnerSSEGatewayRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreatePartnerSSEGateway(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.DeletePartnerSSEGatewayRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePartnerSSEGateway(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.GetPartnerSSEGatewayRequest], sse_gateway.PartnerSSEGateway + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPartnerSSEGateway(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_sse_gateway_reference( + self, + ) -> Callable[ + [sse_gateway.GetSSEGatewayReferenceRequest], sse_gateway.SSEGatewayReference + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSSEGatewayReference(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_partner_sse_gateways( + self, + ) -> Callable[ + [sse_gateway.ListPartnerSSEGatewaysRequest], + sse_gateway.ListPartnerSSEGatewaysResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPartnerSSEGateways(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_sse_gateway_references( + self, + ) -> Callable[ + [sse_gateway.ListSSEGatewayReferencesRequest], + sse_gateway.ListSSEGatewayReferencesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSSEGatewayReferences(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_partner_sse_gateway( + self, + ) -> Callable[ + [sse_gateway.UpdatePartnerSSEGatewayRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdatePartnerSSEGateway(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BaseSSEGatewayServiceRestTransport._BaseGetLocation, SSEGatewayServiceRestStub + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSEGatewayServiceRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseSSEGatewayServiceRestTransport._BaseListLocations, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSEGatewayServiceRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseSSEGatewayServiceRestTransport._BaseGetIamPolicy, SSEGatewayServiceRestStub + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + 
r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSEGatewayServiceRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseSSEGatewayServiceRestTransport._BaseSetIamPolicy, SSEGatewayServiceRestStub + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseSSEGatewayServiceRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSEGatewayServiceRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseSSEGatewayServiceRestTransport._BaseTestIamPermissions, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseSSEGatewayServiceRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSEGatewayServiceRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseSSEGatewayServiceRestTransport._BaseCancelOperation, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseSSEGatewayServiceRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.CancelOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSEGatewayServiceRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseSSEGatewayServiceRestTransport._BaseDeleteOperation, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSEGatewayServiceRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseSSEGatewayServiceRestTransport._BaseGetOperation, SSEGatewayServiceRestStub + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSEGatewayServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseSSEGatewayServiceRestTransport._BaseListOperations, + SSEGatewayServiceRestStub, + ): + def __hash__(self): + return hash("SSEGatewayServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BaseSSEGatewayServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseSSEGatewayServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSEGatewayServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSEGatewayServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSEGatewayServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SSEGatewayServiceRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/rest_base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/rest_base.py new file mode 100644 index 000000000000..7b1f571f953d --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_gateway_service/transports/rest_base.py @@ -0,0 +1,762 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.network_security_v1alpha1.types import sse_gateway + +from .base import DEFAULT_CLIENT_INFO, SSEGatewayServiceTransport + + +class _BaseSSEGatewayServiceRestTransport(SSEGatewayServiceTransport): + """Base REST backend transport for SSEGatewayService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreatePartnerSSEGateway: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "partnerSseGatewayId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/partnerSSEGateways", + "body": "partner_sse_gateway", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_gateway.CreatePartnerSSEGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSEGatewayServiceRestTransport._BaseCreatePartnerSSEGateway._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeletePartnerSSEGateway: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + 
@classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/partnerSSEGateways/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_gateway.DeletePartnerSSEGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSEGatewayServiceRestTransport._BaseDeletePartnerSSEGateway._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetPartnerSSEGateway: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/partnerSSEGateways/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_gateway.GetPartnerSSEGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSEGatewayServiceRestTransport._BaseGetPartnerSSEGateway._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSSEGatewayReference: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/sseGatewayReferences/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_gateway.GetSSEGatewayReferenceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSEGatewayServiceRestTransport._BaseGetSSEGatewayReference._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + 
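[Editor's aside, not part of the generated patch.] The _Base* helper classes in this file all follow the same REST-transcoding pattern: _get_http_options() declares the HTTP bindings, _get_transcoded_request() maps a proto-plus request onto one of them via path_template.transcode(), and _get_query_params_json() serializes whatever fields did not land in the path or body. A minimal sketch of that pattern is shown below; the resource name is a hypothetical placeholder, and the binding is copied from _BaseGetPartnerSSEGateway above.

from google.api_core import path_template
from google.cloud.network_security_v1alpha1.types import sse_gateway

# Hypothetical request; the resource name is an illustrative placeholder.
request = sse_gateway.GetPartnerSSEGatewayRequest(
    name="projects/my-project/locations/us-central1/partnerSSEGateways/my-gateway",
)

# Same binding declared by _BaseGetPartnerSSEGateway._get_http_options().
http_options = [
    {
        "method": "get",
        "uri": "/v1alpha1/{name=projects/*/locations/*/partnerSSEGateways/*}",
    },
]

# transcode() picks the matching binding and splits the request into
# method, uri, query_params and (for bindings that declare a "body") body.
transcoded = path_template.transcode(
    http_options, sse_gateway.GetPartnerSSEGatewayRequest.pb(request)
)
print(transcoded["method"])  # prints: get
print(transcoded["uri"])     # prints the URI with {name=...} expanded to the resource path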
class _BaseListPartnerSSEGateways: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/partnerSSEGateways", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_gateway.ListPartnerSSEGatewaysRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSEGatewayServiceRestTransport._BaseListPartnerSSEGateways._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSSEGatewayReferences: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/sseGatewayReferences", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_gateway.ListSSEGatewayReferencesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSEGatewayServiceRestTransport._BaseListSSEGatewayReferences._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdatePartnerSSEGateway: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{partner_sse_gateway.name=projects/*/locations/*/partnerSSEGateways/*}", + "body": "partner_sse_gateway", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_gateway.UpdatePartnerSSEGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + 
transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSEGatewayServiceRestTransport._BaseUpdatePartnerSSEGateway._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": 
"/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class 
_BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseSSEGatewayServiceRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/__init__.py new file mode 100644 index 000000000000..b606a1349689 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import SSERealmServiceAsyncClient +from .client import SSERealmServiceClient + +__all__ = ( + "SSERealmServiceClient", + "SSERealmServiceAsyncClient", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/async_client.py new file mode 100644 index 000000000000..ba7f064e255d --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/async_client.py @@ -0,0 +1,2528 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.sse_realm_service import pagers +from google.cloud.network_security_v1alpha1.types import common, sse_realm + +from .client import SSERealmServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SSERealmServiceTransport +from .transports.grpc_asyncio import SSERealmServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class SSERealmServiceAsyncClient: + """Service describing handlers for resources""" + + _client: SSERealmServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = SSERealmServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SSERealmServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = SSERealmServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = SSERealmServiceClient._DEFAULT_UNIVERSE + + partner_sse_realm_path = staticmethod(SSERealmServiceClient.partner_sse_realm_path) + parse_partner_sse_realm_path = staticmethod( + SSERealmServiceClient.parse_partner_sse_realm_path + ) + sac_attachment_path = staticmethod(SSERealmServiceClient.sac_attachment_path) + parse_sac_attachment_path = staticmethod( + SSERealmServiceClient.parse_sac_attachment_path + ) + sac_realm_path = staticmethod(SSERealmServiceClient.sac_realm_path) + parse_sac_realm_path = staticmethod(SSERealmServiceClient.parse_sac_realm_path) + common_billing_account_path = staticmethod( + SSERealmServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SSERealmServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SSERealmServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SSERealmServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SSERealmServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SSERealmServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SSERealmServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SSERealmServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(SSERealmServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SSERealmServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SSERealmServiceAsyncClient: The constructed client. + """ + return SSERealmServiceClient.from_service_account_info.__func__(SSERealmServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SSERealmServiceAsyncClient: The constructed client. + """ + return SSERealmServiceClient.from_service_account_file.__func__(SSERealmServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SSERealmServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SSERealmServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SSERealmServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = SSERealmServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, SSERealmServiceTransport, Callable[..., SSERealmServiceTransport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the sse realm service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,SSERealmServiceTransport,Callable[..., SSERealmServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SSERealmServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SSERealmServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.SSERealmServiceAsyncClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "credentialsType": None, + }, + ) + + async def list_sac_realms( + self, + request: Optional[Union[sse_realm.ListSACRealmsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSACRealmsAsyncPager: + r"""Lists SACRealms in a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_sac_realms(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSACRealmsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sac_realms(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListSACRealmsRequest, dict]]): + The request object. Request for ``ListSACRealms`` method. + parent (:class:`str`): + Required. The parent, in the form + ``projects/{project}/locations/global``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListSACRealmsAsyncPager: + Response for ListSACRealms method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.ListSACRealmsRequest): + request = sse_realm.ListSACRealmsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sac_realms + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSACRealmsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_sac_realm( + self, + request: Optional[Union[sse_realm.GetSACRealmRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.SACRealm: + r"""Returns the specified realm. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSACRealmRequest( + name="name_value", + ) + + # Make the request + response = await client.get_sac_realm(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetSACRealmRequest, dict]]): + The request object. Request for ``GetSACRealm`` method. + name (:class:`str`): + Required. Name of the resource, in the form + ``projects/{project}/locations/global/sacRealms/{sacRealm}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.SACRealm: + Represents a Secure Access Connect + (SAC) realm resource. + A Secure Access Connect realm + establishes a connection between your + Google Cloud project and an SSE service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.GetSACRealmRequest): + request = sse_realm.GetSACRealmRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_sac_realm + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_sac_realm( + self, + request: Optional[Union[sse_realm.CreateSACRealmRequest, dict]] = None, + *, + parent: Optional[str] = None, + sac_realm: Optional[sse_realm.SACRealm] = None, + sac_realm_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new SACRealm in a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSACRealmRequest( + parent="parent_value", + sac_realm_id="sac_realm_id_value", + ) + + # Make the request + operation = client.create_sac_realm(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateSACRealmRequest, dict]]): + The request object. Request for ``CreateSACRealm`` method. + parent (:class:`str`): + Required. The parent, in the form + ``projects/{project}/locations/global``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sac_realm (:class:`google.cloud.network_security_v1alpha1.types.SACRealm`): + Required. The resource being created. + This corresponds to the ``sac_realm`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sac_realm_id (:class:`str`): + Required. ID of the created realm. The ID must be 1-63 + characters long, and comply with RFC1035. Specifically, + it must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means + the first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, + or digit, except the last character, which cannot be a + dash. + + This corresponds to the ``sac_realm_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.SACRealm` + Represents a Secure Access Connect (SAC) realm resource. 
+ + A Secure Access Connect realm establishes a + connection between your Google Cloud project and an + SSE service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, sac_realm, sac_realm_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.CreateSACRealmRequest): + request = sse_realm.CreateSACRealmRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if sac_realm is not None: + request.sac_realm = sac_realm + if sac_realm_id is not None: + request.sac_realm_id = sac_realm_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_sac_realm + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + sse_realm.SACRealm, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_sac_realm( + self, + request: Optional[Union[sse_realm.DeleteSACRealmRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes the specified realm. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSACRealmRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_sac_realm(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteSACRealmRequest, dict]]): + The request object. Request for ``DeleteSACRealm`` method. 
+ name (:class:`str`): + Required. Name of the resource, in the form + ``projects/{project}/locations/global/sacRealms/{sacRealm}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.DeleteSACRealmRequest): + request = sse_realm.DeleteSACRealmRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_sac_realm + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_sac_attachments( + self, + request: Optional[Union[sse_realm.ListSACAttachmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSACAttachmentsAsyncPager: + r"""Lists SACAttachments in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_sac_attachments(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSACAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sac_attachments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListSACAttachmentsRequest, dict]]): + The request object. Request for ``ListSACAttachments`` method. + parent (:class:`str`): + Required. The parent, in the form + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListSACAttachmentsAsyncPager: + Response for ListSACAttachments method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.ListSACAttachmentsRequest): + request = sse_realm.ListSACAttachmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sac_attachments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
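+ # In addition to the `__aiter__` convenience, the pager constructed below is
+ # expected to expose a `pages` async iterator for page-at-a-time consumption
+ # (assumed from the shared GAPIC pager interface; pagers.py is not part of this hunk).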
+ response = pagers.ListSACAttachmentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_sac_attachment( + self, + request: Optional[Union[sse_realm.GetSACAttachmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.SACAttachment: + r"""Returns the specified attachment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSACAttachmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_sac_attachment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetSACAttachmentRequest, dict]]): + The request object. Request for ``GetSACAttachment`` method. + name (:class:`str`): + Required. Name of the resource, in the form + ``projects/{project}/locations/{location}/sacAttachments/{sac_attachment}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.SACAttachment: + Represents a Secure Access Connect + (SAC) attachment resource. + A Secure Access Connect attachment + enables NCC Gateway to process traffic + with an SSE product. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.GetSACAttachmentRequest): + request = sse_realm.GetSACAttachmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_sac_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_sac_attachment( + self, + request: Optional[Union[sse_realm.CreateSACAttachmentRequest, dict]] = None, + *, + parent: Optional[str] = None, + sac_attachment: Optional[sse_realm.SACAttachment] = None, + sac_attachment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new SACAttachment in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + sac_attachment = network_security_v1alpha1.SACAttachment() + sac_attachment.sac_realm = "sac_realm_value" + sac_attachment.ncc_gateway = "ncc_gateway_value" + + request = network_security_v1alpha1.CreateSACAttachmentRequest( + parent="parent_value", + sac_attachment_id="sac_attachment_id_value", + sac_attachment=sac_attachment, + ) + + # Make the request + operation = client.create_sac_attachment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreateSACAttachmentRequest, dict]]): + The request object. Request for ``CreateSACAttachment`` method. + parent (:class:`str`): + Required. The parent, in the form + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sac_attachment (:class:`google.cloud.network_security_v1alpha1.types.SACAttachment`): + Required. The resource being created. + This corresponds to the ``sac_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sac_attachment_id (:class:`str`): + Required. ID of the created attachment. The ID must be + 1-63 characters long, and comply with RFC1035. 
+ Specifically, it must be 1-63 characters long and match + the regular expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` + which means the first character must be a lowercase + letter, and all following characters must be a dash, + lowercase letter, or digit, except the last character, + which cannot be a dash. + + This corresponds to the ``sac_attachment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.SACAttachment` + Represents a Secure Access Connect (SAC) attachment + resource. + + A Secure Access Connect attachment enables NCC + Gateway to process traffic with an SSE product. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, sac_attachment, sac_attachment_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.CreateSACAttachmentRequest): + request = sse_realm.CreateSACAttachmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if sac_attachment is not None: + request.sac_attachment = sac_attachment + if sac_attachment_id is not None: + request.sac_attachment_id = sac_attachment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_sac_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + sse_realm.SACAttachment, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
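+ # The operation future configured above resolves to an sse_realm.SACAttachment
+ # once the long-running operation completes, and its interim metadata is
+ # surfaced as common.OperationMetadata.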
+ return response + + async def delete_sac_attachment( + self, + request: Optional[Union[sse_realm.DeleteSACAttachmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes the specified attachment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSACAttachmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_sac_attachment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeleteSACAttachmentRequest, dict]]): + The request object. Request for ``DeleteSACAttachment`` method. + name (:class:`str`): + Required. Name of the resource, in the form + ``projects/{project}/locations/{location}/sacAttachments/{sac_attachment}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, sse_realm.DeleteSACAttachmentRequest): + request = sse_realm.DeleteSACAttachmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_sac_attachment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_partner_sse_realms( + self, + request: Optional[Union[sse_realm.ListPartnerSSERealmsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPartnerSSERealmsAsyncPager: + r"""Lists PartnerSSERealms in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_list_partner_sse_realms(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListPartnerSSERealmsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partner_sse_realms(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsRequest, dict]]): + The request object. Message for requesting list of + PartnerSSERealms + parent (:class:`str`): + Required. Parent value for + ListPartnerSSERealmsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListPartnerSSERealmsAsyncPager: + Message for response to listing + PartnerSSERealms + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.ListPartnerSSERealmsRequest): + request = sse_realm.ListPartnerSSERealmsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_partner_sse_realms + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPartnerSSERealmsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_partner_sse_realm( + self, + request: Optional[Union[sse_realm.GetPartnerSSERealmRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.PartnerSSERealm: + r"""Gets details of a single PartnerSSERealm. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_get_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetPartnerSSERealmRequest( + name="name_value", + ) + + # Make the request + response = await client.get_partner_sse_realm(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.GetPartnerSSERealmRequest, dict]]): + The request object. 
Message for getting a PartnerSSERealm + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.PartnerSSERealm: + Message describing PartnerSSERealm + object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.GetPartnerSSERealmRequest): + request = sse_realm.GetPartnerSSERealmRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_partner_sse_realm + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_partner_sse_realm( + self, + request: Optional[Union[sse_realm.CreatePartnerSSERealmRequest, dict]] = None, + *, + parent: Optional[str] = None, + partner_sse_realm: Optional[sse_realm.PartnerSSERealm] = None, + partner_sse_realm_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new PartnerSSERealm in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_create_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + partner_sse_realm = network_security_v1alpha1.PartnerSSERealm() + partner_sse_realm.pairing_key = "pairing_key_value" + + request = network_security_v1alpha1.CreatePartnerSSERealmRequest( + parent="parent_value", + partner_sse_realm_id="partner_sse_realm_id_value", + partner_sse_realm=partner_sse_realm, + ) + + # Make the request + operation = client.create_partner_sse_realm(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.CreatePartnerSSERealmRequest, dict]]): + The request object. Message for creating a + PartnerSSERealm + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partner_sse_realm (:class:`google.cloud.network_security_v1alpha1.types.PartnerSSERealm`): + Required. The resource being created + This corresponds to the ``partner_sse_realm`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partner_sse_realm_id (:class:`str`): + Required. Id of the requesting object If auto-generating + Id server-side, remove this field and + partner_sse_realm_id from the method_signature of Create + RPC + + This corresponds to the ``partner_sse_realm_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.PartnerSSERealm` + Message describing PartnerSSERealm object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, partner_sse_realm, partner_sse_realm_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.CreatePartnerSSERealmRequest): + request = sse_realm.CreatePartnerSSERealmRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if partner_sse_realm is not None: + request.partner_sse_realm = partner_sse_realm + if partner_sse_realm_id is not None: + request.partner_sse_realm_id = partner_sse_realm_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_partner_sse_realm + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + sse_realm.PartnerSSERealm, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_partner_sse_realm( + self, + request: Optional[Union[sse_realm.DeletePartnerSSERealmRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single PartnerSSERealm. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + async def sample_delete_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeletePartnerSSERealmRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_partner_sse_realm(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1alpha1.types.DeletePartnerSSERealmRequest, dict]]): + The request object. Message for deleting a + PartnerSSERealm + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.DeletePartnerSSERealmRequest): + request = sse_realm.DeletePartnerSSERealmRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_partner_sse_realm + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. 
+ + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "SSERealmServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("SSERealmServiceAsyncClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/client.py new file mode 100644 index 000000000000..182d9b66dc8e --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/client.py @@ -0,0 +1,2983 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.sse_realm_service import pagers +from google.cloud.network_security_v1alpha1.types import 
common, sse_realm
+
+from .transports.base import DEFAULT_CLIENT_INFO, SSERealmServiceTransport
+from .transports.grpc import SSERealmServiceGrpcTransport
+from .transports.grpc_asyncio import SSERealmServiceGrpcAsyncIOTransport
+from .transports.rest import SSERealmServiceRestTransport
+
+
+class SSERealmServiceClientMeta(type):
+    """Metaclass for the SSERealmService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[SSERealmServiceTransport]]
+    _transport_registry["grpc"] = SSERealmServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = SSERealmServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = SSERealmServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[SSERealmServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class SSERealmServiceClient(metaclass=SSERealmServiceClientMeta):
+    """Service describing handlers for resources"""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "networksecurity.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "networksecurity.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SSERealmServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SSERealmServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> SSERealmServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SSERealmServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def partner_sse_realm_path(
+        project: str,
+        location: str,
+        partner_sse_realm: str,
+    ) -> str:
+        """Returns a fully-qualified partner_sse_realm string."""
+        return "projects/{project}/locations/{location}/partnerSSERealms/{partner_sse_realm}".format(
+            project=project,
+            location=location,
+            partner_sse_realm=partner_sse_realm,
+        )
+
+    @staticmethod
+    def parse_partner_sse_realm_path(path: str) -> Dict[str, str]:
+        """Parses a partner_sse_realm path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/partnerSSERealms/(?P<partner_sse_realm>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def sac_attachment_path(
+        project: str,
+        location: str,
+        sac_attachment: str,
+    ) -> str:
+        """Returns a fully-qualified sac_attachment string."""
+        return "projects/{project}/locations/{location}/sacAttachments/{sac_attachment}".format(
+            project=project,
+            location=location,
+            sac_attachment=sac_attachment,
+        )
+
+    @staticmethod
+    def parse_sac_attachment_path(path: str) -> Dict[str, str]:
+        """Parses a sac_attachment path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/sacAttachments/(?P<sac_attachment>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def sac_realm_path(
+        project: str,
+        location: str,
+        sac_realm: str,
+    ) -> str:
+        """Returns a fully-qualified sac_realm string."""
+        return "projects/{project}/locations/{location}/sacRealms/{sac_realm}".format(
+            project=project,
+            location=location,
+            sac_realm=sac_realm,
+        )
+
+    @staticmethod
+    def parse_sac_realm_path(path: str) -> Dict[str, str]:
+        """Parses a sac_realm path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/sacRealms/(?P<sac_realm>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = SSERealmServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = SSERealmServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = SSERealmServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SSERealmServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, SSERealmServiceTransport, Callable[..., SSERealmServiceTransport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the sse realm service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,SSERealmServiceTransport,Callable[..., SSERealmServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SSERealmServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = SSERealmServiceClient._read_environment_variables() + self._client_cert_source = SSERealmServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = SSERealmServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. 
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, SSERealmServiceTransport) + if transport_provided: + # transport is a SSERealmServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(SSERealmServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or SSERealmServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[SSERealmServiceTransport], Callable[..., SSERealmServiceTransport] + ] = ( + SSERealmServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SSERealmServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1alpha1.SSERealmServiceClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "credentialsType": None, + }, + ) + + def list_sac_realms( + self, + request: Optional[Union[sse_realm.ListSACRealmsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSACRealmsPager: + r"""Lists SACRealms in a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_sac_realms(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSACRealmsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sac_realms(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListSACRealmsRequest, dict]): + The request object. Request for ``ListSACRealms`` method. + parent (str): + Required. The parent, in the form + ``projects/{project}/locations/global``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListSACRealmsPager: + Response for ListSACRealms method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.ListSACRealmsRequest): + request = sse_realm.ListSACRealmsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sac_realms] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSACRealmsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
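+ # Illustrative sketch of consuming the pager returned below (the parent
+ # "projects/my-project/locations/global" is a placeholder value):
+ #
+ #   pager = client.list_sac_realms(parent="projects/my-project/locations/global")
+ #   for realm in pager:          # iterates items, fetching further pages lazily
+ #       print(realm.name)
+ #   for page in pager.pages:     # or walk the underlying ListSACRealms responses
+ #       print(page.next_page_token)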
+ return response + + def get_sac_realm( + self, + request: Optional[Union[sse_realm.GetSACRealmRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.SACRealm: + r"""Returns the specified realm. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSACRealmRequest( + name="name_value", + ) + + # Make the request + response = client.get_sac_realm(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetSACRealmRequest, dict]): + The request object. Request for ``GetSACRealm`` method. + name (str): + Required. Name of the resource, in the form + ``projects/{project}/locations/global/sacRealms/{sacRealm}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.SACRealm: + Represents a Secure Access Connect + (SAC) realm resource. + A Secure Access Connect realm + establishes a connection between your + Google Cloud project and an SSE service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.GetSACRealmRequest): + request = sse_realm.GetSACRealmRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_sac_realm] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_sac_realm( + self, + request: Optional[Union[sse_realm.CreateSACRealmRequest, dict]] = None, + *, + parent: Optional[str] = None, + sac_realm: Optional[sse_realm.SACRealm] = None, + sac_realm_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new SACRealm in a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSACRealmRequest( + parent="parent_value", + sac_realm_id="sac_realm_id_value", + ) + + # Make the request + operation = client.create_sac_realm(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateSACRealmRequest, dict]): + The request object. Request for ``CreateSACRealm`` method. + parent (str): + Required. The parent, in the form + ``projects/{project}/locations/global``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sac_realm (google.cloud.network_security_v1alpha1.types.SACRealm): + Required. The resource being created. + This corresponds to the ``sac_realm`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sac_realm_id (str): + Required. ID of the created realm. The ID must be 1-63 + characters long, and comply with RFC1035. Specifically, + it must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means + the first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, + or digit, except the last character, which cannot be a + dash. + + This corresponds to the ``sac_realm_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.SACRealm` + Represents a Secure Access Connect (SAC) realm resource. + + A Secure Access Connect realm establishes a + connection between your Google Cloud project and an + SSE service. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, sac_realm, sac_realm_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.CreateSACRealmRequest): + request = sse_realm.CreateSACRealmRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if sac_realm is not None: + request.sac_realm = sac_realm + if sac_realm_id is not None: + request.sac_realm_id = sac_realm_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_sac_realm] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + sse_realm.SACRealm, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_sac_realm( + self, + request: Optional[Union[sse_realm.DeleteSACRealmRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes the specified realm. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSACRealmRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_sac_realm(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteSACRealmRequest, dict]): + The request object. Request for ``DeleteSACRealm`` method. + name (str): + Required. Name of the resource, in the form + ``projects/{project}/locations/global/sacRealms/{sacRealm}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.DeleteSACRealmRequest): + request = sse_realm.DeleteSACRealmRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_sac_realm] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
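+ # Illustrative sketch of waiting on the operation future created below
+ # (the realm name and the 300-second timeout are placeholder values):
+ #
+ #   op = client.delete_sac_realm(
+ #       name="projects/my-project/locations/global/sacRealms/my-realm"
+ #   )
+ #   op.result(timeout=300)   # blocks until the LRO finishes; raises on error
+ #   print(op.metadata)       # the common.OperationMetadata attached below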
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_sac_attachments( + self, + request: Optional[Union[sse_realm.ListSACAttachmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSACAttachmentsPager: + r"""Lists SACAttachments in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_sac_attachments(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSACAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sac_attachments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListSACAttachmentsRequest, dict]): + The request object. Request for ``ListSACAttachments`` method. + parent (str): + Required. The parent, in the form + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListSACAttachmentsPager: + Response for ListSACAttachments method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.ListSACAttachmentsRequest): + request = sse_realm.ListSACAttachmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sac_attachments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSACAttachmentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_sac_attachment( + self, + request: Optional[Union[sse_realm.GetSACAttachmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.SACAttachment: + r"""Returns the specified attachment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSACAttachmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_sac_attachment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetSACAttachmentRequest, dict]): + The request object. Request for ``GetSACAttachment`` method. + name (str): + Required. Name of the resource, in the form + ``projects/{project}/locations/{location}/sacAttachments/{sac_attachment}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.SACAttachment: + Represents a Secure Access Connect + (SAC) attachment resource. + A Secure Access Connect attachment + enables NCC Gateway to process traffic + with an SSE product. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.GetSACAttachmentRequest): + request = sse_realm.GetSACAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_sac_attachment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_sac_attachment( + self, + request: Optional[Union[sse_realm.CreateSACAttachmentRequest, dict]] = None, + *, + parent: Optional[str] = None, + sac_attachment: Optional[sse_realm.SACAttachment] = None, + sac_attachment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new SACAttachment in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + sac_attachment = network_security_v1alpha1.SACAttachment() + sac_attachment.sac_realm = "sac_realm_value" + sac_attachment.ncc_gateway = "ncc_gateway_value" + + request = network_security_v1alpha1.CreateSACAttachmentRequest( + parent="parent_value", + sac_attachment_id="sac_attachment_id_value", + sac_attachment=sac_attachment, + ) + + # Make the request + operation = client.create_sac_attachment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreateSACAttachmentRequest, dict]): + The request object. Request for ``CreateSACAttachment`` method. + parent (str): + Required. The parent, in the form + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sac_attachment (google.cloud.network_security_v1alpha1.types.SACAttachment): + Required. The resource being created. 
+ This corresponds to the ``sac_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sac_attachment_id (str): + Required. ID of the created attachment. The ID must be + 1-63 characters long, and comply with RFC1035. + Specifically, it must be 1-63 characters long and match + the regular expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` + which means the first character must be a lowercase + letter, and all following characters must be a dash, + lowercase letter, or digit, except the last character, + which cannot be a dash. + + This corresponds to the ``sac_attachment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.SACAttachment` + Represents a Secure Access Connect (SAC) attachment + resource. + + A Secure Access Connect attachment enables NCC + Gateway to process traffic with an SSE product. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, sac_attachment, sac_attachment_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.CreateSACAttachmentRequest): + request = sse_realm.CreateSACAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if sac_attachment is not None: + request.sac_attachment = sac_attachment + if sac_attachment_id is not None: + request.sac_attachment_id = sac_attachment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_sac_attachment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + sse_realm.SACAttachment, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def delete_sac_attachment( + self, + request: Optional[Union[sse_realm.DeleteSACAttachmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes the specified attachment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSACAttachmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_sac_attachment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeleteSACAttachmentRequest, dict]): + The request object. Request for ``DeleteSACAttachment`` method. + name (str): + Required. Name of the resource, in the form + ``projects/{project}/locations/{location}/sacAttachments/{sac_attachment}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.DeleteSACAttachmentRequest): + request = sse_realm.DeleteSACAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_sac_attachment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_partner_sse_realms( + self, + request: Optional[Union[sse_realm.ListPartnerSSERealmsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPartnerSSERealmsPager: + r"""Lists PartnerSSERealms in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_list_partner_sse_realms(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListPartnerSSERealmsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partner_sse_realms(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsRequest, dict]): + The request object. Message for requesting list of + PartnerSSERealms + parent (str): + Required. Parent value for + ListPartnerSSERealmsRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListPartnerSSERealmsPager: + Message for response to listing + PartnerSSERealms + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.ListPartnerSSERealmsRequest): + request = sse_realm.ListPartnerSSERealmsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_partner_sse_realms] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPartnerSSERealmsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_partner_sse_realm( + self, + request: Optional[Union[sse_realm.GetPartnerSSERealmRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.PartnerSSERealm: + r"""Gets details of a single PartnerSSERealm. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_get_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetPartnerSSERealmRequest( + name="name_value", + ) + + # Make the request + response = client.get_partner_sse_realm(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.GetPartnerSSERealmRequest, dict]): + The request object. Message for getting a PartnerSSERealm + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1alpha1.types.PartnerSSERealm: + Message describing PartnerSSERealm + object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.GetPartnerSSERealmRequest): + request = sse_realm.GetPartnerSSERealmRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_partner_sse_realm] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_partner_sse_realm( + self, + request: Optional[Union[sse_realm.CreatePartnerSSERealmRequest, dict]] = None, + *, + parent: Optional[str] = None, + partner_sse_realm: Optional[sse_realm.PartnerSSERealm] = None, + partner_sse_realm_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new PartnerSSERealm in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_create_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + partner_sse_realm = network_security_v1alpha1.PartnerSSERealm() + partner_sse_realm.pairing_key = "pairing_key_value" + + request = network_security_v1alpha1.CreatePartnerSSERealmRequest( + parent="parent_value", + partner_sse_realm_id="partner_sse_realm_id_value", + partner_sse_realm=partner_sse_realm, + ) + + # Make the request + operation = client.create_partner_sse_realm(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.CreatePartnerSSERealmRequest, dict]): + The request object. Message for creating a + PartnerSSERealm + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partner_sse_realm (google.cloud.network_security_v1alpha1.types.PartnerSSERealm): + Required. The resource being created + This corresponds to the ``partner_sse_realm`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partner_sse_realm_id (str): + Required. Id of the requesting object If auto-generating + Id server-side, remove this field and + partner_sse_realm_id from the method_signature of Create + RPC + + This corresponds to the ``partner_sse_realm_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_security_v1alpha1.types.PartnerSSERealm` + Message describing PartnerSSERealm object + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, partner_sse_realm, partner_sse_realm_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.CreatePartnerSSERealmRequest): + request = sse_realm.CreatePartnerSSERealmRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if partner_sse_realm is not None: + request.partner_sse_realm = partner_sse_realm + if partner_sse_realm_id is not None: + request.partner_sse_realm_id = partner_sse_realm_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_partner_sse_realm] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + sse_realm.PartnerSSERealm, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_partner_sse_realm( + self, + request: Optional[Union[sse_realm.DeletePartnerSSERealmRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a single PartnerSSERealm. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1alpha1 + + def sample_delete_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeletePartnerSSERealmRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_partner_sse_realm(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1alpha1.types.DeletePartnerSSERealmRequest, dict]): + The request object. Message for deleting a + PartnerSSERealm + name (str): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, sse_realm.DeletePartnerSSERealmRequest): + request = sse_realm.DeletePartnerSSERealmRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_partner_sse_realm] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SSERealmServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. 
Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. 
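+ # `routing_header.to_grpc_metadata` folds the routing parameters into a single
+ # "x-goog-request-params" metadata entry with URL-encoded values, for example
+ # ("x-goog-request-params", "resource=projects%2Fmy-project%2F...") for a
+ # placeholder resource; the backend uses this header to route the request.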
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("SSERealmServiceClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/pagers.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/pagers.py new file mode 100644 index 000000000000..04faa177134a --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/pagers.py @@ -0,0 +1,509 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.network_security_v1alpha1.types import sse_realm + + +class ListSACRealmsPager: + """A pager for iterating through ``list_sac_realms`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListSACRealmsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sac_realms`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSACRealms`` requests and continue to iterate + through the ``sac_realms`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListSACRealmsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., sse_realm.ListSACRealmsResponse], + request: sse_realm.ListSACRealmsRequest, + response: sse_realm.ListSACRealmsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.network_security_v1alpha1.types.ListSACRealmsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListSACRealmsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = sse_realm.ListSACRealmsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[sse_realm.ListSACRealmsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[sse_realm.SACRealm]: + for page in self.pages: + yield from page.sac_realms + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSACRealmsAsyncPager: + """A pager for iterating through ``list_sac_realms`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListSACRealmsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sac_realms`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSACRealms`` requests and continue to iterate + through the ``sac_realms`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListSACRealmsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[sse_realm.ListSACRealmsResponse]], + request: sse_realm.ListSACRealmsRequest, + response: sse_realm.ListSACRealmsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListSACRealmsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListSACRealmsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = sse_realm.ListSACRealmsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[sse_realm.ListSACRealmsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[sse_realm.SACRealm]: + async def async_generator(): + async for page in self.pages: + for response in page.sac_realms: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSACAttachmentsPager: + """A pager for iterating through ``list_sac_attachments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListSACAttachmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sac_attachments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSACAttachments`` requests and continue to iterate + through the ``sac_attachments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListSACAttachmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., sse_realm.ListSACAttachmentsResponse], + request: sse_realm.ListSACAttachmentsRequest, + response: sse_realm.ListSACAttachmentsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListSACAttachmentsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListSACAttachmentsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = sse_realm.ListSACAttachmentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[sse_realm.ListSACAttachmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[sse_realm.SACAttachment]: + for page in self.pages: + yield from page.sac_attachments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSACAttachmentsAsyncPager: + """A pager for iterating through ``list_sac_attachments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListSACAttachmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sac_attachments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSACAttachments`` requests and continue to iterate + through the ``sac_attachments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListSACAttachmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[sse_realm.ListSACAttachmentsResponse]], + request: sse_realm.ListSACAttachmentsRequest, + response: sse_realm.ListSACAttachmentsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListSACAttachmentsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListSACAttachmentsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = sse_realm.ListSACAttachmentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[sse_realm.ListSACAttachmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[sse_realm.SACAttachment]: + async def async_generator(): + async for page in self.pages: + for response in page.sac_attachments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPartnerSSERealmsPager: + """A pager for iterating through ``list_partner_sse_realms`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``partner_sse_realms`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPartnerSSERealms`` requests and continue to iterate + through the ``partner_sse_realms`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., sse_realm.ListPartnerSSERealmsResponse], + request: sse_realm.ListPartnerSSERealmsRequest, + response: sse_realm.ListPartnerSSERealmsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = sse_realm.ListPartnerSSERealmsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[sse_realm.ListPartnerSSERealmsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[sse_realm.PartnerSSERealm]: + for page in self.pages: + yield from page.partner_sse_realms + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPartnerSSERealmsAsyncPager: + """A pager for iterating through ``list_partner_sse_realms`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``partner_sse_realms`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPartnerSSERealms`` requests and continue to iterate + through the ``partner_sse_realms`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[sse_realm.ListPartnerSSERealmsResponse]], + request: sse_realm.ListPartnerSSERealmsRequest, + response: sse_realm.ListPartnerSSERealmsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsRequest): + The initial request object. + response (google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = sse_realm.ListPartnerSSERealmsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[sse_realm.ListPartnerSSERealmsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[sse_realm.PartnerSSERealm]: + async def async_generator(): + async for page in self.pages: + for response in page.partner_sse_realms: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/README.rst b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/README.rst new file mode 100644 index 000000000000..4b6d2f20d93b --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`SSERealmServiceTransport` is the ABC for all transports. +- public child `SSERealmServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `SSERealmServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseSSERealmServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `SSERealmServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/__init__.py new file mode 100644 index 000000000000..9c9d37351dee --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SSERealmServiceTransport +from .grpc import SSERealmServiceGrpcTransport +from .grpc_asyncio import SSERealmServiceGrpcAsyncIOTransport +from .rest import SSERealmServiceRestInterceptor, SSERealmServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SSERealmServiceTransport]] +_transport_registry["grpc"] = SSERealmServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SSERealmServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SSERealmServiceRestTransport + +__all__ = ( + "SSERealmServiceTransport", + "SSERealmServiceGrpcTransport", + "SSERealmServiceGrpcAsyncIOTransport", + "SSERealmServiceRestTransport", + "SSERealmServiceRestInterceptor", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/base.py new file mode 100644 index 000000000000..840ee7448ff6 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/base.py @@ -0,0 +1,466 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1alpha1 import gapic_version as package_version +from google.cloud.network_security_v1alpha1.types import sse_realm + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SSERealmServiceTransport(abc.ABC): + """Abstract transport class for SSERealmService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "networksecurity.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
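+ # Resolution order (sketched): explicit `credentials` take precedence, then the
+ # deprecated `credentials_file`, and finally Application Default Credentials
+ # obtained via `google.auth.default()`; supplying both explicit credentials and
+ # a credentials file is rejected below as ambiguous.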
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_sac_realms: gapic_v1.method.wrap_method( + self.list_sac_realms, + default_timeout=None, + client_info=client_info, + ), + self.get_sac_realm: gapic_v1.method.wrap_method( + self.get_sac_realm, + default_timeout=None, + client_info=client_info, + ), + self.create_sac_realm: gapic_v1.method.wrap_method( + self.create_sac_realm, + default_timeout=None, + client_info=client_info, + ), + self.delete_sac_realm: gapic_v1.method.wrap_method( + self.delete_sac_realm, + default_timeout=None, + client_info=client_info, + ), + self.list_sac_attachments: gapic_v1.method.wrap_method( + self.list_sac_attachments, + default_timeout=None, + client_info=client_info, + ), + self.get_sac_attachment: gapic_v1.method.wrap_method( + self.get_sac_attachment, + default_timeout=None, + client_info=client_info, + ), + self.create_sac_attachment: gapic_v1.method.wrap_method( + self.create_sac_attachment, + default_timeout=None, + client_info=client_info, + ), + self.delete_sac_attachment: gapic_v1.method.wrap_method( + self.delete_sac_attachment, + default_timeout=None, + client_info=client_info, + ), + self.list_partner_sse_realms: gapic_v1.method.wrap_method( + self.list_partner_sse_realms, + default_timeout=None, + client_info=client_info, + ), + self.get_partner_sse_realm: gapic_v1.method.wrap_method( + self.get_partner_sse_realm, + default_timeout=None, + client_info=client_info, + ), + self.create_partner_sse_realm: gapic_v1.method.wrap_method( + self.create_partner_sse_realm, + default_timeout=None, + client_info=client_info, + ), + self.delete_partner_sse_realm: gapic_v1.method.wrap_method( + self.delete_partner_sse_realm, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + 
default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_sac_realms( + self, + ) -> Callable[ + [sse_realm.ListSACRealmsRequest], + Union[ + sse_realm.ListSACRealmsResponse, Awaitable[sse_realm.ListSACRealmsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_sac_realm( + self, + ) -> Callable[ + [sse_realm.GetSACRealmRequest], + Union[sse_realm.SACRealm, Awaitable[sse_realm.SACRealm]], + ]: + raise NotImplementedError() + + @property + def create_sac_realm( + self, + ) -> Callable[ + [sse_realm.CreateSACRealmRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_sac_realm( + self, + ) -> Callable[ + [sse_realm.DeleteSACRealmRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_sac_attachments( + self, + ) -> Callable[ + [sse_realm.ListSACAttachmentsRequest], + Union[ + sse_realm.ListSACAttachmentsResponse, + Awaitable[sse_realm.ListSACAttachmentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_sac_attachment( + self, + ) -> Callable[ + [sse_realm.GetSACAttachmentRequest], + Union[sse_realm.SACAttachment, Awaitable[sse_realm.SACAttachment]], + ]: + raise NotImplementedError() + + @property + def create_sac_attachment( + self, + ) -> Callable[ + [sse_realm.CreateSACAttachmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_sac_attachment( + self, + ) -> Callable[ + [sse_realm.DeleteSACAttachmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_partner_sse_realms( + self, + ) -> Callable[ + [sse_realm.ListPartnerSSERealmsRequest], + Union[ + sse_realm.ListPartnerSSERealmsResponse, + Awaitable[sse_realm.ListPartnerSSERealmsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_partner_sse_realm( + self, + ) -> Callable[ + [sse_realm.GetPartnerSSERealmRequest], + Union[sse_realm.PartnerSSERealm, Awaitable[sse_realm.PartnerSSERealm]], + ]: + raise NotImplementedError() + + @property + def create_partner_sse_realm( + self, + ) -> Callable[ + [sse_realm.CreatePartnerSSERealmRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property 
+ def delete_partner_sse_realm( + self, + ) -> Callable[ + [sse_realm.DeletePartnerSSERealmRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SSERealmServiceTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/grpc.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/grpc.py new file mode 100644 index 000000000000..3849fe96fdb4 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/grpc.py @@ -0,0 +1,858 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import sse_realm + +from .base import DEFAULT_CLIENT_INFO, SSERealmServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class SSERealmServiceGrpcTransport(SSERealmServiceTransport): + """gRPC backend transport for SSERealmService. 
+ + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_sac_realms( + self, + ) -> Callable[[sse_realm.ListSACRealmsRequest], sse_realm.ListSACRealmsResponse]: + r"""Return a callable for the list sac realms method over gRPC. + + Lists SACRealms in a given project. + + Returns: + Callable[[~.ListSACRealmsRequest], + ~.ListSACRealmsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_sac_realms" not in self._stubs: + self._stubs["list_sac_realms"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/ListSACRealms", + request_serializer=sse_realm.ListSACRealmsRequest.serialize, + response_deserializer=sse_realm.ListSACRealmsResponse.deserialize, + ) + return self._stubs["list_sac_realms"] + + @property + def get_sac_realm( + self, + ) -> Callable[[sse_realm.GetSACRealmRequest], sse_realm.SACRealm]: + r"""Return a callable for the get sac realm method over gRPC. + + Returns the specified realm. + + Returns: + Callable[[~.GetSACRealmRequest], + ~.SACRealm]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_sac_realm" not in self._stubs: + self._stubs["get_sac_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/GetSACRealm", + request_serializer=sse_realm.GetSACRealmRequest.serialize, + response_deserializer=sse_realm.SACRealm.deserialize, + ) + return self._stubs["get_sac_realm"] + + @property + def create_sac_realm( + self, + ) -> Callable[[sse_realm.CreateSACRealmRequest], operations_pb2.Operation]: + r"""Return a callable for the create sac realm method over gRPC. + + Creates a new SACRealm in a given project. + + Returns: + Callable[[~.CreateSACRealmRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_sac_realm" not in self._stubs: + self._stubs["create_sac_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/CreateSACRealm", + request_serializer=sse_realm.CreateSACRealmRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_sac_realm"] + + @property + def delete_sac_realm( + self, + ) -> Callable[[sse_realm.DeleteSACRealmRequest], operations_pb2.Operation]: + r"""Return a callable for the delete sac realm method over gRPC. + + Deletes the specified realm. + + Returns: + Callable[[~.DeleteSACRealmRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_sac_realm" not in self._stubs: + self._stubs["delete_sac_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/DeleteSACRealm", + request_serializer=sse_realm.DeleteSACRealmRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_sac_realm"] + + @property + def list_sac_attachments( + self, + ) -> Callable[ + [sse_realm.ListSACAttachmentsRequest], sse_realm.ListSACAttachmentsResponse + ]: + r"""Return a callable for the list sac attachments method over gRPC. + + Lists SACAttachments in a given project and location. + + Returns: + Callable[[~.ListSACAttachmentsRequest], + ~.ListSACAttachmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_sac_attachments" not in self._stubs: + self._stubs["list_sac_attachments"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/ListSACAttachments", + request_serializer=sse_realm.ListSACAttachmentsRequest.serialize, + response_deserializer=sse_realm.ListSACAttachmentsResponse.deserialize, + ) + return self._stubs["list_sac_attachments"] + + @property + def get_sac_attachment( + self, + ) -> Callable[[sse_realm.GetSACAttachmentRequest], sse_realm.SACAttachment]: + r"""Return a callable for the get sac attachment method over gRPC. + + Returns the specified attachment. 
+ + Returns: + Callable[[~.GetSACAttachmentRequest], + ~.SACAttachment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_sac_attachment" not in self._stubs: + self._stubs["get_sac_attachment"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/GetSACAttachment", + request_serializer=sse_realm.GetSACAttachmentRequest.serialize, + response_deserializer=sse_realm.SACAttachment.deserialize, + ) + return self._stubs["get_sac_attachment"] + + @property + def create_sac_attachment( + self, + ) -> Callable[[sse_realm.CreateSACAttachmentRequest], operations_pb2.Operation]: + r"""Return a callable for the create sac attachment method over gRPC. + + Creates a new SACAttachment in a given project and + location. + + Returns: + Callable[[~.CreateSACAttachmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_sac_attachment" not in self._stubs: + self._stubs["create_sac_attachment"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/CreateSACAttachment", + request_serializer=sse_realm.CreateSACAttachmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_sac_attachment"] + + @property + def delete_sac_attachment( + self, + ) -> Callable[[sse_realm.DeleteSACAttachmentRequest], operations_pb2.Operation]: + r"""Return a callable for the delete sac attachment method over gRPC. + + Deletes the specified attachment. + + Returns: + Callable[[~.DeleteSACAttachmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_sac_attachment" not in self._stubs: + self._stubs["delete_sac_attachment"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/DeleteSACAttachment", + request_serializer=sse_realm.DeleteSACAttachmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_sac_attachment"] + + @property + def list_partner_sse_realms( + self, + ) -> Callable[ + [sse_realm.ListPartnerSSERealmsRequest], sse_realm.ListPartnerSSERealmsResponse + ]: + r"""Return a callable for the list partner sse realms method over gRPC. + + Lists PartnerSSERealms in a given project and + location. + + Returns: + Callable[[~.ListPartnerSSERealmsRequest], + ~.ListPartnerSSERealmsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_partner_sse_realms" not in self._stubs: + self._stubs["list_partner_sse_realms"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/ListPartnerSSERealms", + request_serializer=sse_realm.ListPartnerSSERealmsRequest.serialize, + response_deserializer=sse_realm.ListPartnerSSERealmsResponse.deserialize, + ) + return self._stubs["list_partner_sse_realms"] + + @property + def get_partner_sse_realm( + self, + ) -> Callable[[sse_realm.GetPartnerSSERealmRequest], sse_realm.PartnerSSERealm]: + r"""Return a callable for the get partner sse realm method over gRPC. + + Gets details of a single PartnerSSERealm. + + Returns: + Callable[[~.GetPartnerSSERealmRequest], + ~.PartnerSSERealm]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_partner_sse_realm" not in self._stubs: + self._stubs["get_partner_sse_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/GetPartnerSSERealm", + request_serializer=sse_realm.GetPartnerSSERealmRequest.serialize, + response_deserializer=sse_realm.PartnerSSERealm.deserialize, + ) + return self._stubs["get_partner_sse_realm"] + + @property + def create_partner_sse_realm( + self, + ) -> Callable[[sse_realm.CreatePartnerSSERealmRequest], operations_pb2.Operation]: + r"""Return a callable for the create partner sse realm method over gRPC. + + Creates a new PartnerSSERealm in a given project and + location. + + Returns: + Callable[[~.CreatePartnerSSERealmRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_partner_sse_realm" not in self._stubs: + self._stubs["create_partner_sse_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/CreatePartnerSSERealm", + request_serializer=sse_realm.CreatePartnerSSERealmRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_partner_sse_realm"] + + @property + def delete_partner_sse_realm( + self, + ) -> Callable[[sse_realm.DeletePartnerSSERealmRequest], operations_pb2.Operation]: + r"""Return a callable for the delete partner sse realm method over gRPC. + + Deletes a single PartnerSSERealm. + + Returns: + Callable[[~.DeletePartnerSSERealmRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_partner_sse_realm" not in self._stubs: + self._stubs["delete_partner_sse_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/DeletePartnerSSERealm", + request_serializer=sse_realm.DeletePartnerSSERealmRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_partner_sse_realm"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+        Sets the IAM access control policy on the specified
+        function. Replaces any existing policy.
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+        Gets the IAM access control policy for a function.
+        Returns an empty policy if the function exists and does
+        not have a policy set.
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SSERealmServiceGrpcTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/grpc_asyncio.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..80af49d10951 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/grpc_asyncio.py @@ -0,0 +1,1001 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import inspect
+import json
+import logging as std_logging
+import pickle
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.network_security_v1alpha1.types import sse_realm
+
+from .base import DEFAULT_CLIENT_INFO, SSERealmServiceTransport
+from .grpc import SSERealmServiceGrpcTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+    grpc.aio.UnaryUnaryClientInterceptor
+):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert gRPC metadata to a list of tuples
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
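+
+# NOTE: Illustrative sketch, not part of the generated output. The interceptor
+# above only emits its request/response records when ``google.api_core``
+# exposes ``client_logging`` (``CLIENT_LOGGING_SUPPORTED``) and DEBUG logging
+# is enabled for this module's logger; a minimal way to see that output with
+# the standard library logging module is:
+#
+#     import logging
+#
+#     logging.basicConfig(level=logging.DEBUG)
+#     logging.getLogger(
+#         "google.cloud.network_security_v1alpha1.services"
+#         ".sse_realm_service.transports.grpc_asyncio"
+#     ).setLevel(logging.DEBUG)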
+ + +class SSERealmServiceGrpcAsyncIOTransport(SSERealmServiceTransport): + """gRPC AsyncIO backend transport for SSERealmService. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_sac_realms( + self, + ) -> Callable[ + [sse_realm.ListSACRealmsRequest], Awaitable[sse_realm.ListSACRealmsResponse] + ]: + r"""Return a callable for the list sac realms method over gRPC. + + Lists SACRealms in a given project. + + Returns: + Callable[[~.ListSACRealmsRequest], + Awaitable[~.ListSACRealmsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_sac_realms" not in self._stubs: + self._stubs["list_sac_realms"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/ListSACRealms", + request_serializer=sse_realm.ListSACRealmsRequest.serialize, + response_deserializer=sse_realm.ListSACRealmsResponse.deserialize, + ) + return self._stubs["list_sac_realms"] + + @property + def get_sac_realm( + self, + ) -> Callable[[sse_realm.GetSACRealmRequest], Awaitable[sse_realm.SACRealm]]: + r"""Return a callable for the get sac realm method over gRPC. + + Returns the specified realm. + + Returns: + Callable[[~.GetSACRealmRequest], + Awaitable[~.SACRealm]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_sac_realm" not in self._stubs: + self._stubs["get_sac_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/GetSACRealm", + request_serializer=sse_realm.GetSACRealmRequest.serialize, + response_deserializer=sse_realm.SACRealm.deserialize, + ) + return self._stubs["get_sac_realm"] + + @property + def create_sac_realm( + self, + ) -> Callable[ + [sse_realm.CreateSACRealmRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create sac realm method over gRPC. + + Creates a new SACRealm in a given project. + + Returns: + Callable[[~.CreateSACRealmRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_sac_realm" not in self._stubs: + self._stubs["create_sac_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/CreateSACRealm", + request_serializer=sse_realm.CreateSACRealmRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_sac_realm"] + + @property + def delete_sac_realm( + self, + ) -> Callable[ + [sse_realm.DeleteSACRealmRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete sac realm method over gRPC. + + Deletes the specified realm. + + Returns: + Callable[[~.DeleteSACRealmRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_sac_realm" not in self._stubs: + self._stubs["delete_sac_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/DeleteSACRealm", + request_serializer=sse_realm.DeleteSACRealmRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_sac_realm"] + + @property + def list_sac_attachments( + self, + ) -> Callable[ + [sse_realm.ListSACAttachmentsRequest], + Awaitable[sse_realm.ListSACAttachmentsResponse], + ]: + r"""Return a callable for the list sac attachments method over gRPC. + + Lists SACAttachments in a given project and location. 
+ + Returns: + Callable[[~.ListSACAttachmentsRequest], + Awaitable[~.ListSACAttachmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_sac_attachments" not in self._stubs: + self._stubs["list_sac_attachments"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/ListSACAttachments", + request_serializer=sse_realm.ListSACAttachmentsRequest.serialize, + response_deserializer=sse_realm.ListSACAttachmentsResponse.deserialize, + ) + return self._stubs["list_sac_attachments"] + + @property + def get_sac_attachment( + self, + ) -> Callable[ + [sse_realm.GetSACAttachmentRequest], Awaitable[sse_realm.SACAttachment] + ]: + r"""Return a callable for the get sac attachment method over gRPC. + + Returns the specified attachment. + + Returns: + Callable[[~.GetSACAttachmentRequest], + Awaitable[~.SACAttachment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_sac_attachment" not in self._stubs: + self._stubs["get_sac_attachment"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/GetSACAttachment", + request_serializer=sse_realm.GetSACAttachmentRequest.serialize, + response_deserializer=sse_realm.SACAttachment.deserialize, + ) + return self._stubs["get_sac_attachment"] + + @property + def create_sac_attachment( + self, + ) -> Callable[ + [sse_realm.CreateSACAttachmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create sac attachment method over gRPC. + + Creates a new SACAttachment in a given project and + location. + + Returns: + Callable[[~.CreateSACAttachmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_sac_attachment" not in self._stubs: + self._stubs["create_sac_attachment"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/CreateSACAttachment", + request_serializer=sse_realm.CreateSACAttachmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_sac_attachment"] + + @property + def delete_sac_attachment( + self, + ) -> Callable[ + [sse_realm.DeleteSACAttachmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete sac attachment method over gRPC. + + Deletes the specified attachment. + + Returns: + Callable[[~.DeleteSACAttachmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_sac_attachment" not in self._stubs: + self._stubs["delete_sac_attachment"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/DeleteSACAttachment", + request_serializer=sse_realm.DeleteSACAttachmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_sac_attachment"] + + @property + def list_partner_sse_realms( + self, + ) -> Callable[ + [sse_realm.ListPartnerSSERealmsRequest], + Awaitable[sse_realm.ListPartnerSSERealmsResponse], + ]: + r"""Return a callable for the list partner sse realms method over gRPC. + + Lists PartnerSSERealms in a given project and + location. + + Returns: + Callable[[~.ListPartnerSSERealmsRequest], + Awaitable[~.ListPartnerSSERealmsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_partner_sse_realms" not in self._stubs: + self._stubs["list_partner_sse_realms"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/ListPartnerSSERealms", + request_serializer=sse_realm.ListPartnerSSERealmsRequest.serialize, + response_deserializer=sse_realm.ListPartnerSSERealmsResponse.deserialize, + ) + return self._stubs["list_partner_sse_realms"] + + @property + def get_partner_sse_realm( + self, + ) -> Callable[ + [sse_realm.GetPartnerSSERealmRequest], Awaitable[sse_realm.PartnerSSERealm] + ]: + r"""Return a callable for the get partner sse realm method over gRPC. + + Gets details of a single PartnerSSERealm. + + Returns: + Callable[[~.GetPartnerSSERealmRequest], + Awaitable[~.PartnerSSERealm]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_partner_sse_realm" not in self._stubs: + self._stubs["get_partner_sse_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/GetPartnerSSERealm", + request_serializer=sse_realm.GetPartnerSSERealmRequest.serialize, + response_deserializer=sse_realm.PartnerSSERealm.deserialize, + ) + return self._stubs["get_partner_sse_realm"] + + @property + def create_partner_sse_realm( + self, + ) -> Callable[ + [sse_realm.CreatePartnerSSERealmRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create partner sse realm method over gRPC. + + Creates a new PartnerSSERealm in a given project and + location. + + Returns: + Callable[[~.CreatePartnerSSERealmRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_partner_sse_realm" not in self._stubs: + self._stubs["create_partner_sse_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/CreatePartnerSSERealm", + request_serializer=sse_realm.CreatePartnerSSERealmRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_partner_sse_realm"] + + @property + def delete_partner_sse_realm( + self, + ) -> Callable[ + [sse_realm.DeletePartnerSSERealmRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete partner sse realm method over gRPC. + + Deletes a single PartnerSSERealm. + + Returns: + Callable[[~.DeletePartnerSSERealmRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_partner_sse_realm" not in self._stubs: + self._stubs["delete_partner_sse_realm"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1alpha1.SSERealmService/DeletePartnerSSERealm", + request_serializer=sse_realm.DeletePartnerSSERealmRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_partner_sse_realm"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_sac_realms: self._wrap_method( + self.list_sac_realms, + default_timeout=None, + client_info=client_info, + ), + self.get_sac_realm: self._wrap_method( + self.get_sac_realm, + default_timeout=None, + client_info=client_info, + ), + self.create_sac_realm: self._wrap_method( + self.create_sac_realm, + default_timeout=None, + client_info=client_info, + ), + self.delete_sac_realm: self._wrap_method( + self.delete_sac_realm, + default_timeout=None, + client_info=client_info, + ), + self.list_sac_attachments: self._wrap_method( + self.list_sac_attachments, + default_timeout=None, + client_info=client_info, + ), + self.get_sac_attachment: self._wrap_method( + self.get_sac_attachment, + default_timeout=None, + client_info=client_info, + ), + self.create_sac_attachment: self._wrap_method( + self.create_sac_attachment, + default_timeout=None, + client_info=client_info, + ), + self.delete_sac_attachment: self._wrap_method( + self.delete_sac_attachment, + default_timeout=None, + client_info=client_info, + ), + self.list_partner_sse_realms: self._wrap_method( + self.list_partner_sse_realms, + default_timeout=None, + client_info=client_info, + ), + self.get_partner_sse_realm: self._wrap_method( + self.get_partner_sse_realm, + default_timeout=None, + client_info=client_info, + ), + self.create_partner_sse_realm: self._wrap_method( + self.create_partner_sse_realm, + default_timeout=None, + client_info=client_info, + ), + self.delete_partner_sse_realm: self._wrap_method( + self.delete_partner_sse_realm, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + 
self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("SSERealmServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/rest.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/rest.py new file mode 100644 index 000000000000..1d8925bb7c90 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/rest.py @@ -0,0 +1,4275 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.network_security_v1alpha1.types import sse_realm + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseSSERealmServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SSERealmServiceRestInterceptor: + """Interceptor for SSERealmService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SSERealmServiceRestTransport. + + .. 
code-block:: python + class MyCustomSSERealmServiceInterceptor(SSERealmServiceRestInterceptor): + def pre_create_partner_sse_realm(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_partner_sse_realm(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_sac_attachment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_sac_attachment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_sac_realm(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_sac_realm(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_partner_sse_realm(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_partner_sse_realm(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_sac_attachment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_sac_attachment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_sac_realm(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_sac_realm(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_partner_sse_realm(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_partner_sse_realm(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_sac_attachment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_sac_attachment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_sac_realm(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_sac_realm(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_partner_sse_realms(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_partner_sse_realms(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_sac_attachments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_sac_attachments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_sac_realms(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_sac_realms(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SSERealmServiceRestTransport(interceptor=MyCustomSSERealmServiceInterceptor()) + client = SSERealmServiceClient(transport=transport) + + + """ + + def pre_create_partner_sse_realm( + self, + request: sse_realm.CreatePartnerSSERealmRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.CreatePartnerSSERealmRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_partner_sse_realm + + Override in a 
subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_create_partner_sse_realm( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_partner_sse_realm + + DEPRECATED. Please use the `post_create_partner_sse_realm_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_create_partner_sse_realm` interceptor runs + before the `post_create_partner_sse_realm_with_metadata` interceptor. + """ + return response + + def post_create_partner_sse_realm_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_partner_sse_realm + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_create_partner_sse_realm_with_metadata` + interceptor in new development instead of the `post_create_partner_sse_realm` interceptor. + When both interceptors are used, this `post_create_partner_sse_realm_with_metadata` interceptor runs after the + `post_create_partner_sse_realm` interceptor. The (possibly modified) response returned by + `post_create_partner_sse_realm` will be passed to + `post_create_partner_sse_realm_with_metadata`. + """ + return response, metadata + + def pre_create_sac_attachment( + self, + request: sse_realm.CreateSACAttachmentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.CreateSACAttachmentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_sac_attachment + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_create_sac_attachment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_sac_attachment + + DEPRECATED. Please use the `post_create_sac_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_create_sac_attachment` interceptor runs + before the `post_create_sac_attachment_with_metadata` interceptor. + """ + return response + + def post_create_sac_attachment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_sac_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_create_sac_attachment_with_metadata` + interceptor in new development instead of the `post_create_sac_attachment` interceptor. + When both interceptors are used, this `post_create_sac_attachment_with_metadata` interceptor runs after the + `post_create_sac_attachment` interceptor. 
The (possibly modified) response returned by + `post_create_sac_attachment` will be passed to + `post_create_sac_attachment_with_metadata`. + """ + return response, metadata + + def pre_create_sac_realm( + self, + request: sse_realm.CreateSACRealmRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.CreateSACRealmRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_sac_realm + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_create_sac_realm( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_sac_realm + + DEPRECATED. Please use the `post_create_sac_realm_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_create_sac_realm` interceptor runs + before the `post_create_sac_realm_with_metadata` interceptor. + """ + return response + + def post_create_sac_realm_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_sac_realm + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_create_sac_realm_with_metadata` + interceptor in new development instead of the `post_create_sac_realm` interceptor. + When both interceptors are used, this `post_create_sac_realm_with_metadata` interceptor runs after the + `post_create_sac_realm` interceptor. The (possibly modified) response returned by + `post_create_sac_realm` will be passed to + `post_create_sac_realm_with_metadata`. + """ + return response, metadata + + def pre_delete_partner_sse_realm( + self, + request: sse_realm.DeletePartnerSSERealmRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.DeletePartnerSSERealmRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_partner_sse_realm + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_delete_partner_sse_realm( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_partner_sse_realm + + DEPRECATED. Please use the `post_delete_partner_sse_realm_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_delete_partner_sse_realm` interceptor runs + before the `post_delete_partner_sse_realm_with_metadata` interceptor. + """ + return response + + def post_delete_partner_sse_realm_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_partner_sse_realm + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. 
+ + We recommend only using this `post_delete_partner_sse_realm_with_metadata` + interceptor in new development instead of the `post_delete_partner_sse_realm` interceptor. + When both interceptors are used, this `post_delete_partner_sse_realm_with_metadata` interceptor runs after the + `post_delete_partner_sse_realm` interceptor. The (possibly modified) response returned by + `post_delete_partner_sse_realm` will be passed to + `post_delete_partner_sse_realm_with_metadata`. + """ + return response, metadata + + def pre_delete_sac_attachment( + self, + request: sse_realm.DeleteSACAttachmentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.DeleteSACAttachmentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_sac_attachment + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_delete_sac_attachment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_sac_attachment + + DEPRECATED. Please use the `post_delete_sac_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_delete_sac_attachment` interceptor runs + before the `post_delete_sac_attachment_with_metadata` interceptor. + """ + return response + + def post_delete_sac_attachment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_sac_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_delete_sac_attachment_with_metadata` + interceptor in new development instead of the `post_delete_sac_attachment` interceptor. + When both interceptors are used, this `post_delete_sac_attachment_with_metadata` interceptor runs after the + `post_delete_sac_attachment` interceptor. The (possibly modified) response returned by + `post_delete_sac_attachment` will be passed to + `post_delete_sac_attachment_with_metadata`. + """ + return response, metadata + + def pre_delete_sac_realm( + self, + request: sse_realm.DeleteSACRealmRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.DeleteSACRealmRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_sac_realm + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_delete_sac_realm( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_sac_realm + + DEPRECATED. Please use the `post_delete_sac_realm_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_delete_sac_realm` interceptor runs + before the `post_delete_sac_realm_with_metadata` interceptor. 
+ """ + return response + + def post_delete_sac_realm_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_sac_realm + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_delete_sac_realm_with_metadata` + interceptor in new development instead of the `post_delete_sac_realm` interceptor. + When both interceptors are used, this `post_delete_sac_realm_with_metadata` interceptor runs after the + `post_delete_sac_realm` interceptor. The (possibly modified) response returned by + `post_delete_sac_realm` will be passed to + `post_delete_sac_realm_with_metadata`. + """ + return response, metadata + + def pre_get_partner_sse_realm( + self, + request: sse_realm.GetPartnerSSERealmRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.GetPartnerSSERealmRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_partner_sse_realm + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_get_partner_sse_realm( + self, response: sse_realm.PartnerSSERealm + ) -> sse_realm.PartnerSSERealm: + """Post-rpc interceptor for get_partner_sse_realm + + DEPRECATED. Please use the `post_get_partner_sse_realm_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_get_partner_sse_realm` interceptor runs + before the `post_get_partner_sse_realm_with_metadata` interceptor. + """ + return response + + def post_get_partner_sse_realm_with_metadata( + self, + response: sse_realm.PartnerSSERealm, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sse_realm.PartnerSSERealm, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_partner_sse_realm + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_get_partner_sse_realm_with_metadata` + interceptor in new development instead of the `post_get_partner_sse_realm` interceptor. + When both interceptors are used, this `post_get_partner_sse_realm_with_metadata` interceptor runs after the + `post_get_partner_sse_realm` interceptor. The (possibly modified) response returned by + `post_get_partner_sse_realm` will be passed to + `post_get_partner_sse_realm_with_metadata`. + """ + return response, metadata + + def pre_get_sac_attachment( + self, + request: sse_realm.GetSACAttachmentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.GetSACAttachmentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_sac_attachment + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_get_sac_attachment( + self, response: sse_realm.SACAttachment + ) -> sse_realm.SACAttachment: + """Post-rpc interceptor for get_sac_attachment + + DEPRECATED. 
Please use the `post_get_sac_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_get_sac_attachment` interceptor runs + before the `post_get_sac_attachment_with_metadata` interceptor. + """ + return response + + def post_get_sac_attachment_with_metadata( + self, + response: sse_realm.SACAttachment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sse_realm.SACAttachment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_sac_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_get_sac_attachment_with_metadata` + interceptor in new development instead of the `post_get_sac_attachment` interceptor. + When both interceptors are used, this `post_get_sac_attachment_with_metadata` interceptor runs after the + `post_get_sac_attachment` interceptor. The (possibly modified) response returned by + `post_get_sac_attachment` will be passed to + `post_get_sac_attachment_with_metadata`. + """ + return response, metadata + + def pre_get_sac_realm( + self, + request: sse_realm.GetSACRealmRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sse_realm.GetSACRealmRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_sac_realm + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_get_sac_realm(self, response: sse_realm.SACRealm) -> sse_realm.SACRealm: + """Post-rpc interceptor for get_sac_realm + + DEPRECATED. Please use the `post_get_sac_realm_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_get_sac_realm` interceptor runs + before the `post_get_sac_realm_with_metadata` interceptor. + """ + return response + + def post_get_sac_realm_with_metadata( + self, + response: sse_realm.SACRealm, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sse_realm.SACRealm, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_sac_realm + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_get_sac_realm_with_metadata` + interceptor in new development instead of the `post_get_sac_realm` interceptor. + When both interceptors are used, this `post_get_sac_realm_with_metadata` interceptor runs after the + `post_get_sac_realm` interceptor. The (possibly modified) response returned by + `post_get_sac_realm` will be passed to + `post_get_sac_realm_with_metadata`. + """ + return response, metadata + + def pre_list_partner_sse_realms( + self, + request: sse_realm.ListPartnerSSERealmsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.ListPartnerSSERealmsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_partner_sse_realms + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. 
+ """ + return request, metadata + + def post_list_partner_sse_realms( + self, response: sse_realm.ListPartnerSSERealmsResponse + ) -> sse_realm.ListPartnerSSERealmsResponse: + """Post-rpc interceptor for list_partner_sse_realms + + DEPRECATED. Please use the `post_list_partner_sse_realms_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_list_partner_sse_realms` interceptor runs + before the `post_list_partner_sse_realms_with_metadata` interceptor. + """ + return response + + def post_list_partner_sse_realms_with_metadata( + self, + response: sse_realm.ListPartnerSSERealmsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.ListPartnerSSERealmsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_partner_sse_realms + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_list_partner_sse_realms_with_metadata` + interceptor in new development instead of the `post_list_partner_sse_realms` interceptor. + When both interceptors are used, this `post_list_partner_sse_realms_with_metadata` interceptor runs after the + `post_list_partner_sse_realms` interceptor. The (possibly modified) response returned by + `post_list_partner_sse_realms` will be passed to + `post_list_partner_sse_realms_with_metadata`. + """ + return response, metadata + + def pre_list_sac_attachments( + self, + request: sse_realm.ListSACAttachmentsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.ListSACAttachmentsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_sac_attachments + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_list_sac_attachments( + self, response: sse_realm.ListSACAttachmentsResponse + ) -> sse_realm.ListSACAttachmentsResponse: + """Post-rpc interceptor for list_sac_attachments + + DEPRECATED. Please use the `post_list_sac_attachments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_list_sac_attachments` interceptor runs + before the `post_list_sac_attachments_with_metadata` interceptor. + """ + return response + + def post_list_sac_attachments_with_metadata( + self, + response: sse_realm.ListSACAttachmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.ListSACAttachmentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_sac_attachments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_list_sac_attachments_with_metadata` + interceptor in new development instead of the `post_list_sac_attachments` interceptor. + When both interceptors are used, this `post_list_sac_attachments_with_metadata` interceptor runs after the + `post_list_sac_attachments` interceptor. 
The (possibly modified) response returned by + `post_list_sac_attachments` will be passed to + `post_list_sac_attachments_with_metadata`. + """ + return response, metadata + + def pre_list_sac_realms( + self, + request: sse_realm.ListSACRealmsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sse_realm.ListSACRealmsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_sac_realms + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_list_sac_realms( + self, response: sse_realm.ListSACRealmsResponse + ) -> sse_realm.ListSACRealmsResponse: + """Post-rpc interceptor for list_sac_realms + + DEPRECATED. Please use the `post_list_sac_realms_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. This `post_list_sac_realms` interceptor runs + before the `post_list_sac_realms_with_metadata` interceptor. + """ + return response + + def post_list_sac_realms_with_metadata( + self, + response: sse_realm.ListSACRealmsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sse_realm.ListSACRealmsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_sac_realms + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SSERealmService server but before it is returned to user code. + + We recommend only using this `post_list_sac_realms_with_metadata` + interceptor in new development instead of the `post_list_sac_realms` interceptor. + When both interceptors are used, this `post_list_sac_realms_with_metadata` interceptor runs after the + `post_list_sac_realms` interceptor. The (possibly modified) response returned by + `post_list_sac_realms` will be passed to + `post_list_sac_realms_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. 
+ """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SSERealmService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SSERealmService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SSERealmServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SSERealmServiceRestInterceptor + + +class SSERealmServiceRestTransport(_BaseSSERealmServiceRestTransport): + """REST backend synchronous transport for SSERealmService. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SSERealmServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. 
A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SSERealmServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreatePartnerSSERealm( + _BaseSSERealmServiceRestTransport._BaseCreatePartnerSSERealm, + SSERealmServiceRestStub, + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.CreatePartnerSSERealm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: sse_realm.CreatePartnerSSERealmRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create partner sse realm method over HTTP. + + Args: + request (~.sse_realm.CreatePartnerSSERealmRequest): + The request object. Message for creating a + PartnerSSERealm + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseCreatePartnerSSERealm._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_partner_sse_realm( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseCreatePartnerSSERealm._get_transcoded_request( + http_options, request + ) + + body = _BaseSSERealmServiceRestTransport._BaseCreatePartnerSSERealm._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseCreatePartnerSSERealm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.CreatePartnerSSERealm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "CreatePartnerSSERealm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + SSERealmServiceRestTransport._CreatePartnerSSERealm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_partner_sse_realm(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_partner_sse_realm_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.create_partner_sse_realm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "CreatePartnerSSERealm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateSACAttachment( + _BaseSSERealmServiceRestTransport._BaseCreateSACAttachment, + SSERealmServiceRestStub, + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.CreateSACAttachment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, 
strict=True), + data=body, + ) + return response + + def __call__( + self, + request: sse_realm.CreateSACAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create sac attachment method over HTTP. + + Args: + request (~.sse_realm.CreateSACAttachmentRequest): + The request object. Request for ``CreateSACAttachment`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseCreateSACAttachment._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_sac_attachment( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseCreateSACAttachment._get_transcoded_request( + http_options, request + ) + + body = _BaseSSERealmServiceRestTransport._BaseCreateSACAttachment._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseCreateSACAttachment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.CreateSACAttachment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "CreateSACAttachment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._CreateSACAttachment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_sac_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_sac_attachment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.create_sac_attachment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "CreateSACAttachment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateSACRealm( + _BaseSSERealmServiceRestTransport._BaseCreateSACRealm, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.CreateSACRealm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: sse_realm.CreateSACRealmRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create sac realm method over HTTP. + + Args: + request (~.sse_realm.CreateSACRealmRequest): + The request object. Request for ``CreateSACRealm`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseCreateSACRealm._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_sac_realm( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseCreateSACRealm._get_transcoded_request( + http_options, request + ) + + body = _BaseSSERealmServiceRestTransport._BaseCreateSACRealm._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseCreateSACRealm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.CreateSACRealm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "CreateSACRealm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._CreateSACRealm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_sac_realm(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_sac_realm_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.create_sac_realm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "CreateSACRealm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeletePartnerSSERealm( + _BaseSSERealmServiceRestTransport._BaseDeletePartnerSSERealm, + SSERealmServiceRestStub, + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.DeletePartnerSSERealm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
sse_realm.DeletePartnerSSERealmRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete partner sse realm method over HTTP. + + Args: + request (~.sse_realm.DeletePartnerSSERealmRequest): + The request object. Message for deleting a + PartnerSSERealm + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseDeletePartnerSSERealm._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_partner_sse_realm( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseDeletePartnerSSERealm._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseDeletePartnerSSERealm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.DeletePartnerSSERealm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "DeletePartnerSSERealm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + SSERealmServiceRestTransport._DeletePartnerSSERealm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
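+            # Editorial note: the delete (and get/list) handlers call
+            # `_get_response` without a request body, so only the URI and query
+            # string carry data. The Operation proto parsed below is returned
+            # as-is; wrapping it in a long-running-operation future for polling
+            # is assumed to happen in the client layer above this transport.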
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_partner_sse_realm(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_partner_sse_realm_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.delete_partner_sse_realm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "DeletePartnerSSERealm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteSACAttachment( + _BaseSSERealmServiceRestTransport._BaseDeleteSACAttachment, + SSERealmServiceRestStub, + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.DeleteSACAttachment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_realm.DeleteSACAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete sac attachment method over HTTP. + + Args: + request (~.sse_realm.DeleteSACAttachmentRequest): + The request object. Request for ``DeleteSACAttachment`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseDeleteSACAttachment._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_sac_attachment( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseDeleteSACAttachment._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseDeleteSACAttachment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.DeleteSACAttachment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "DeleteSACAttachment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._DeleteSACAttachment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_sac_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_sac_attachment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.delete_sac_attachment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "DeleteSACAttachment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteSACRealm( + _BaseSSERealmServiceRestTransport._BaseDeleteSACRealm, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.DeleteSACRealm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_realm.DeleteSACRealmRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = 
None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete sac realm method over HTTP. + + Args: + request (~.sse_realm.DeleteSACRealmRequest): + The request object. Request for ``DeleteSACRealm`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseDeleteSACRealm._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_sac_realm( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseDeleteSACRealm._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseDeleteSACRealm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.DeleteSACRealm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "DeleteSACRealm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._DeleteSACRealm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_sac_realm(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_sac_realm_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.delete_sac_realm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "DeleteSACRealm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetPartnerSSERealm( + _BaseSSERealmServiceRestTransport._BaseGetPartnerSSERealm, + SSERealmServiceRestStub, + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.GetPartnerSSERealm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_realm.GetPartnerSSERealmRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.PartnerSSERealm: + r"""Call the get partner sse realm method over HTTP. + + Args: + request (~.sse_realm.GetPartnerSSERealmRequest): + The request object. Message for getting a PartnerSSERealm + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.sse_realm.PartnerSSERealm: + Message describing PartnerSSERealm + object + + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseGetPartnerSSERealm._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_partner_sse_realm( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseGetPartnerSSERealm._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseGetPartnerSSERealm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.GetPartnerSSERealm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetPartnerSSERealm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._GetPartnerSSERealm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = sse_realm.PartnerSSERealm() + pb_resp = sse_realm.PartnerSSERealm.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_partner_sse_realm(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_partner_sse_realm_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = sse_realm.PartnerSSERealm.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.get_partner_sse_realm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetPartnerSSERealm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetSACAttachment( + _BaseSSERealmServiceRestTransport._BaseGetSACAttachment, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.GetSACAttachment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + 
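+        # Editorial note: every handler shares this `_get_response` shape: the
+        # HTTP verb comes from the transcoded request, metadata tuples are sent
+        # as headers, and query parameters are flattened with strict=True before
+        # the call goes out on the transport's shared authorized Session.
+        # A minimal sketch of how this stub is reached, assuming the surrounding
+        # transport instance is available as `transport`:
+        #
+        #     attachment = transport.get_sac_attachment(
+        #         sse_realm.GetSACAttachmentRequest(name="..."),
+        #     )
+        #
+        # i.e. the `get_sac_attachment` property defined later on this transport
+        # returns this callable class bound to the session, host and interceptor.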
def __call__( + self, + request: sse_realm.GetSACAttachmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.SACAttachment: + r"""Call the get sac attachment method over HTTP. + + Args: + request (~.sse_realm.GetSACAttachmentRequest): + The request object. Request for ``GetSACAttachment`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.sse_realm.SACAttachment: + Represents a Secure Access Connect + (SAC) attachment resource. + A Secure Access Connect attachment + enables NCC Gateway to process traffic + with an SSE product. + + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseGetSACAttachment._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_sac_attachment( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseGetSACAttachment._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseGetSACAttachment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.GetSACAttachment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetSACAttachment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._GetSACAttachment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
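+            # Editorial note on the parse step below:
+            # `sse_realm.SACAttachment.pb(resp)` returns the protobuf message
+            # backing the proto-plus wrapper, so parsing the JSON body into
+            # `pb_resp` populates `resp` in place; the interceptor's post hooks
+            # then receive the fully populated wrapper.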
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = sse_realm.SACAttachment() + pb_resp = sse_realm.SACAttachment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_sac_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_sac_attachment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = sse_realm.SACAttachment.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.get_sac_attachment", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetSACAttachment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetSACRealm( + _BaseSSERealmServiceRestTransport._BaseGetSACRealm, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.GetSACRealm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_realm.GetSACRealmRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.SACRealm: + r"""Call the get sac realm method over HTTP. + + Args: + request (~.sse_realm.GetSACRealmRequest): + The request object. Request for ``GetSACRealm`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.sse_realm.SACRealm: + Represents a Secure Access Connect + (SAC) realm resource. + A Secure Access Connect realm + establishes a connection between your + Google Cloud project and an SSE service. 
+ + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseGetSACRealm._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_sac_realm(request, metadata) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseGetSACRealm._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseGetSACRealm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.GetSACRealm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetSACRealm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._GetSACRealm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = sse_realm.SACRealm() + pb_resp = sse_realm.SACRealm.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_sac_realm(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_sac_realm_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = sse_realm.SACRealm.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.get_sac_realm", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetSACRealm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListPartnerSSERealms( + _BaseSSERealmServiceRestTransport._BaseListPartnerSSERealms, + SSERealmServiceRestStub, + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.ListPartnerSSERealms") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_realm.ListPartnerSSERealmsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.ListPartnerSSERealmsResponse: + r"""Call the list partner sse realms method over HTTP. + + Args: + request (~.sse_realm.ListPartnerSSERealmsRequest): + The request object. Message for requesting list of + PartnerSSERealms + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.sse_realm.ListPartnerSSERealmsResponse: + Message for response to listing + PartnerSSERealms + + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseListPartnerSSERealms._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_partner_sse_realms( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseListPartnerSSERealms._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseListPartnerSSERealms._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.ListPartnerSSERealms", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "ListPartnerSSERealms", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._ListPartnerSSERealms._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
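+            # Editorial note: the transport returns a single
+            # ListPartnerSSERealmsResponse page; iterating across page tokens is
+            # assumed to be handled by the pager returned from the client layer
+            # rather than here.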
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = sse_realm.ListPartnerSSERealmsResponse() + pb_resp = sse_realm.ListPartnerSSERealmsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_partner_sse_realms(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_partner_sse_realms_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = sse_realm.ListPartnerSSERealmsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.list_partner_sse_realms", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "ListPartnerSSERealms", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSACAttachments( + _BaseSSERealmServiceRestTransport._BaseListSACAttachments, + SSERealmServiceRestStub, + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.ListSACAttachments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_realm.ListSACAttachmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.ListSACAttachmentsResponse: + r"""Call the list sac attachments method over HTTP. + + Args: + request (~.sse_realm.ListSACAttachmentsRequest): + The request object. Request for ``ListSACAttachments`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.sse_realm.ListSACAttachmentsResponse: + Response for ``ListSACAttachments`` method. 
+ """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseListSACAttachments._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_sac_attachments( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseListSACAttachments._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseListSACAttachments._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.ListSACAttachments", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "ListSACAttachments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._ListSACAttachments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = sse_realm.ListSACAttachmentsResponse() + pb_resp = sse_realm.ListSACAttachmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_sac_attachments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sac_attachments_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = sse_realm.ListSACAttachmentsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.list_sac_attachments", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "ListSACAttachments", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSACRealms( + _BaseSSERealmServiceRestTransport._BaseListSACRealms, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.ListSACRealms") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: sse_realm.ListSACRealmsRequest, + 
*, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> sse_realm.ListSACRealmsResponse: + r"""Call the list sac realms method over HTTP. + + Args: + request (~.sse_realm.ListSACRealmsRequest): + The request object. Request for ``ListSACRealms`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.sse_realm.ListSACRealmsResponse: + Response for ``ListSACRealms`` method. + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseListSACRealms._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_sac_realms(request, metadata) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseListSACRealms._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseListSACRealms._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.ListSACRealms", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "ListSACRealms", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._ListSACRealms._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = sse_realm.ListSACRealmsResponse() + pb_resp = sse_realm.ListSACRealmsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_sac_realms(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sac_realms_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = sse_realm.ListSACRealmsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.list_sac_realms", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "ListSACRealms", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_partner_sse_realm( + self, + ) -> Callable[[sse_realm.CreatePartnerSSERealmRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreatePartnerSSERealm(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_sac_attachment( + self, + ) -> Callable[[sse_realm.CreateSACAttachmentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSACAttachment(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_sac_realm( + self, + ) -> Callable[[sse_realm.CreateSACRealmRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSACRealm(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_partner_sse_realm( + self, + ) -> Callable[[sse_realm.DeletePartnerSSERealmRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePartnerSSERealm(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_sac_attachment( + self, + ) -> Callable[[sse_realm.DeleteSACAttachmentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSACAttachment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_sac_realm( + self, + ) -> Callable[[sse_realm.DeleteSACRealmRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteSACRealm(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_partner_sse_realm( + self, + ) -> Callable[[sse_realm.GetPartnerSSERealmRequest], sse_realm.PartnerSSERealm]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPartnerSSERealm(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_sac_attachment( + self, + ) -> Callable[[sse_realm.GetSACAttachmentRequest], sse_realm.SACAttachment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSACAttachment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_sac_realm( + self, + ) -> Callable[[sse_realm.GetSACRealmRequest], sse_realm.SACRealm]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSACRealm(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_partner_sse_realms( + self, + ) -> Callable[ + [sse_realm.ListPartnerSSERealmsRequest], sse_realm.ListPartnerSSERealmsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPartnerSSERealms(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_sac_attachments( + self, + ) -> Callable[ + [sse_realm.ListSACAttachmentsRequest], sse_realm.ListSACAttachmentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSACAttachments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_sac_realms( + self, + ) -> Callable[[sse_realm.ListSACRealmsRequest], sse_realm.ListSACRealmsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSACRealms(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BaseSSERealmServiceRestTransport._BaseGetLocation, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. 
+ + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
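+            # Editorial note: the mixin handlers (GetLocation, ListLocations and
+            # the IAM policy calls) parse the decoded JSON straight into the pb2
+            # message and skip the `*_with_metadata` interceptor hooks used by
+            # the service RPCs. The "Received response" log line below is emitted
+            # under the SSERealmServiceAsyncClient name even in this synchronous
+            # REST transport, which appears to be an artifact of the shared
+            # template.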
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseSSERealmServiceRestTransport._BaseListLocations, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseSSERealmServiceRestTransport._BaseGetIamPolicy, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam 
policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseSSERealmServiceRestTransport._BaseSetIamPolicy, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseSSERealmServiceRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseSSERealmServiceRestTransport._BaseTestIamPermissions, + SSERealmServiceRestStub, + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseSSERealmServiceRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseSSERealmServiceRestTransport._BaseCancelOperation, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseSSERealmServiceRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.CancelOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseSSERealmServiceRestTransport._BaseDeleteOperation, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseSSERealmServiceRestTransport._BaseGetOperation, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseSSERealmServiceRestTransport._BaseListOperations, SSERealmServiceRestStub + ): + def __hash__(self): + return hash("SSERealmServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the 
list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BaseSSERealmServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseSSERealmServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSSERealmServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1alpha1.SSERealmServiceClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SSERealmServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
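+            # On success, the JSON body below is parsed into operations_pb2.ListOperationsResponse; + # callers page through results by re-issuing the request with the returned next_page_token.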
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1alpha1.SSERealmServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SSERealmServiceRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/rest_base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/rest_base.py new file mode 100644 index 000000000000..ab48b435daae --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/services/sse_realm_service/transports/rest_base.py @@ -0,0 +1,1011 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.network_security_v1alpha1.types import sse_realm + +from .base import DEFAULT_CLIENT_INFO, SSERealmServiceTransport + + +class _BaseSSERealmServiceRestTransport(SSERealmServiceTransport): + """Base REST backend transport for SSERealmService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreatePartnerSSERealm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "partnerSseRealmId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/partnerSSERealms", + "body": "partner_sse_realm", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.CreatePartnerSSERealmRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseCreatePartnerSSERealm._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateSACAttachment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "sacAttachmentId": "",
+ } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/sacAttachments", + "body": "sac_attachment", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.CreateSACAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseCreateSACAttachment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateSACRealm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "sacRealmId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/sacRealms", + "body": "sac_realm", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.CreateSACRealmRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseCreateSACRealm._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeletePartnerSSERealm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/partnerSSERealms/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.DeletePartnerSSERealmRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + 
return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseDeletePartnerSSERealm._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSACAttachment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/sacAttachments/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.DeleteSACAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseDeleteSACAttachment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSACRealm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/sacRealms/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.DeleteSACRealmRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseDeleteSACRealm._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetPartnerSSERealm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/partnerSSERealms/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + 
pb_request = sse_realm.GetPartnerSSERealmRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseGetPartnerSSERealm._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSACAttachment: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/sacAttachments/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.GetSACAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseGetSACAttachment._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSACRealm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/sacRealms/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.GetSACRealmRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseGetSACRealm._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListPartnerSSERealms: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": 
"/v1alpha1/{parent=projects/*/locations/*}/partnerSSERealms", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.ListPartnerSSERealmsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseListPartnerSSERealms._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSACAttachments: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/sacAttachments", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.ListSACAttachmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseListSACAttachments._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSACRealms: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/sacRealms", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = sse_realm.ListSACRealmsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSSERealmServiceRestTransport._BaseListSACRealms._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = 
json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authorizationPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/serverTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/clientTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/addressGroups/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{resource=projects/*/locations/*/authzPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class 
_BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha1/{name=organizations/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseSSERealmServiceRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/__init__.py index c62d612cfc22..9219ce6427c5 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/__init__.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/__init__.py @@ -13,6 +13,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from .authorization_policy import ( + AuthorizationPolicy, + CreateAuthorizationPolicyRequest, + DeleteAuthorizationPolicyRequest, + GetAuthorizationPolicyRequest, + ListAuthorizationPoliciesRequest, + ListAuthorizationPoliciesResponse, + UpdateAuthorizationPolicyRequest, +) +from .authz_policy import ( + AuthzPolicy, + CreateAuthzPolicyRequest, + DeleteAuthzPolicyRequest, + GetAuthzPolicyRequest, + ListAuthzPoliciesRequest, + ListAuthzPoliciesResponse, + UpdateAuthzPolicyRequest, +) +from .backend_authentication_config import ( + BackendAuthenticationConfig, + CreateBackendAuthenticationConfigRequest, + DeleteBackendAuthenticationConfigRequest, + GetBackendAuthenticationConfigRequest, + ListBackendAuthenticationConfigsRequest, + ListBackendAuthenticationConfigsResponse, + UpdateBackendAuthenticationConfigRequest, +) from .client_tls_policy import ( ClientTlsPolicy, CreateClientTlsPolicyRequest, @@ -23,14 +50,228 @@ UpdateClientTlsPolicyRequest, ) from .common import OperationMetadata +from .dns_threat_detector import ( + CreateDnsThreatDetectorRequest, + DeleteDnsThreatDetectorRequest, + DnsThreatDetector, + GetDnsThreatDetectorRequest, + ListDnsThreatDetectorsRequest, + ListDnsThreatDetectorsResponse, + UpdateDnsThreatDetectorRequest, +) +from .firewall_activation import ( + CreateFirewallEndpointAssociationRequest, + CreateFirewallEndpointRequest, + DeleteFirewallEndpointAssociationRequest, + DeleteFirewallEndpointRequest, + FirewallEndpoint, + FirewallEndpointAssociation, + GetFirewallEndpointAssociationRequest, + GetFirewallEndpointRequest, + ListFirewallEndpointAssociationsRequest, + ListFirewallEndpointAssociationsResponse, + ListFirewallEndpointsRequest, + ListFirewallEndpointsResponse, + UpdateFirewallEndpointAssociationRequest, + UpdateFirewallEndpointRequest, +) +from .gateway_security_policy import ( + CreateGatewaySecurityPolicyRequest, + DeleteGatewaySecurityPolicyRequest, + GatewaySecurityPolicy, + GetGatewaySecurityPolicyRequest, + ListGatewaySecurityPoliciesRequest, + ListGatewaySecurityPoliciesResponse, + UpdateGatewaySecurityPolicyRequest, +) +from .gateway_security_policy_rule import ( + CreateGatewaySecurityPolicyRuleRequest, + DeleteGatewaySecurityPolicyRuleRequest, + GatewaySecurityPolicyRule, + GetGatewaySecurityPolicyRuleRequest, + ListGatewaySecurityPolicyRulesRequest, + ListGatewaySecurityPolicyRulesResponse, + UpdateGatewaySecurityPolicyRuleRequest, +) +from .intercept import ( + CreateInterceptDeploymentGroupRequest, + CreateInterceptDeploymentRequest, + CreateInterceptEndpointGroupAssociationRequest, + CreateInterceptEndpointGroupRequest, + DeleteInterceptDeploymentGroupRequest, + DeleteInterceptDeploymentRequest, + DeleteInterceptEndpointGroupAssociationRequest, + DeleteInterceptEndpointGroupRequest, + GetInterceptDeploymentGroupRequest, + GetInterceptDeploymentRequest, + GetInterceptEndpointGroupAssociationRequest, + GetInterceptEndpointGroupRequest, + InterceptDeployment, + InterceptDeploymentGroup, + InterceptEndpointGroup, + InterceptEndpointGroupAssociation, + InterceptLocation, + ListInterceptDeploymentGroupsRequest, + ListInterceptDeploymentGroupsResponse, + ListInterceptDeploymentsRequest, + ListInterceptDeploymentsResponse, + ListInterceptEndpointGroupAssociationsRequest, + ListInterceptEndpointGroupAssociationsResponse, + ListInterceptEndpointGroupsRequest, + ListInterceptEndpointGroupsResponse, + UpdateInterceptDeploymentGroupRequest, + UpdateInterceptDeploymentRequest, + UpdateInterceptEndpointGroupAssociationRequest, + 
UpdateInterceptEndpointGroupRequest, +) +from .mirroring import ( + CreateMirroringDeploymentGroupRequest, + CreateMirroringDeploymentRequest, + CreateMirroringEndpointGroupAssociationRequest, + CreateMirroringEndpointGroupRequest, + DeleteMirroringDeploymentGroupRequest, + DeleteMirroringDeploymentRequest, + DeleteMirroringEndpointGroupAssociationRequest, + DeleteMirroringEndpointGroupRequest, + GetMirroringDeploymentGroupRequest, + GetMirroringDeploymentRequest, + GetMirroringEndpointGroupAssociationRequest, + GetMirroringEndpointGroupRequest, + ListMirroringDeploymentGroupsRequest, + ListMirroringDeploymentGroupsResponse, + ListMirroringDeploymentsRequest, + ListMirroringDeploymentsResponse, + ListMirroringEndpointGroupAssociationsRequest, + ListMirroringEndpointGroupAssociationsResponse, + ListMirroringEndpointGroupsRequest, + ListMirroringEndpointGroupsResponse, + MirroringDeployment, + MirroringDeploymentGroup, + MirroringEndpointGroup, + MirroringEndpointGroupAssociation, + MirroringLocation, + UpdateMirroringDeploymentGroupRequest, + UpdateMirroringDeploymentRequest, + UpdateMirroringEndpointGroupAssociationRequest, + UpdateMirroringEndpointGroupRequest, +) +from .security_profile_group import SecurityProfile, SecurityProfileGroup +from .security_profile_group_intercept import CustomInterceptProfile +from .security_profile_group_mirroring import CustomMirroringProfile +from .security_profile_group_service import ( + CreateSecurityProfileGroupRequest, + CreateSecurityProfileRequest, + DeleteSecurityProfileGroupRequest, + DeleteSecurityProfileRequest, + GetSecurityProfileGroupRequest, + GetSecurityProfileRequest, + ListSecurityProfileGroupsRequest, + ListSecurityProfileGroupsResponse, + ListSecurityProfilesRequest, + ListSecurityProfilesResponse, + UpdateSecurityProfileGroupRequest, + UpdateSecurityProfileRequest, +) +from .security_profile_group_threatprevention import ( + AntivirusOverride, + Protocol, + Severity, + SeverityOverride, + ThreatAction, + ThreatOverride, + ThreatPreventionProfile, + ThreatType, +) +from .security_profile_group_urlfiltering import UrlFilter, UrlFilteringProfile +from .server_tls_policy import ( + CreateServerTlsPolicyRequest, + DeleteServerTlsPolicyRequest, + GetServerTlsPolicyRequest, + ListServerTlsPoliciesRequest, + ListServerTlsPoliciesResponse, + ServerTlsPolicy, + UpdateServerTlsPolicyRequest, +) +from .sse_gateway import ( + CreatePartnerSSEGatewayRequest, + DeletePartnerSSEGatewayRequest, + GetPartnerSSEGatewayRequest, + GetSSEGatewayReferenceRequest, + ListPartnerSSEGatewaysRequest, + ListPartnerSSEGatewaysResponse, + ListSSEGatewayReferencesRequest, + ListSSEGatewayReferencesResponse, + PartnerSSEGateway, + SSEGatewayReference, + UpdatePartnerSSEGatewayRequest, +) +from .sse_realm import ( + CreatePartnerSSERealmRequest, + CreateSACAttachmentRequest, + CreateSACRealmRequest, + DeletePartnerSSERealmRequest, + DeleteSACAttachmentRequest, + DeleteSACRealmRequest, + GetPartnerSSERealmRequest, + GetSACAttachmentRequest, + GetSACRealmRequest, + ListPartnerSSERealmsRequest, + ListPartnerSSERealmsResponse, + ListSACAttachmentsRequest, + ListSACAttachmentsResponse, + ListSACRealmsRequest, + ListSACRealmsResponse, + PartnerSSERealm, + SACAttachment, + SACRealm, +) from .tls import ( CertificateProvider, CertificateProviderInstance, GrpcEndpoint, ValidationCA, ) +from .tls_inspection_policy import ( + CreateTlsInspectionPolicyRequest, + DeleteTlsInspectionPolicyRequest, + GetTlsInspectionPolicyRequest, + ListTlsInspectionPoliciesRequest, + 
ListTlsInspectionPoliciesResponse, + TlsInspectionPolicy, + UpdateTlsInspectionPolicyRequest, +) +from .url_list import ( + CreateUrlListRequest, + DeleteUrlListRequest, + GetUrlListRequest, + ListUrlListsRequest, + ListUrlListsResponse, + UpdateUrlListRequest, + UrlList, +) __all__ = ( + "AuthorizationPolicy", + "CreateAuthorizationPolicyRequest", + "DeleteAuthorizationPolicyRequest", + "GetAuthorizationPolicyRequest", + "ListAuthorizationPoliciesRequest", + "ListAuthorizationPoliciesResponse", + "UpdateAuthorizationPolicyRequest", + "AuthzPolicy", + "CreateAuthzPolicyRequest", + "DeleteAuthzPolicyRequest", + "GetAuthzPolicyRequest", + "ListAuthzPoliciesRequest", + "ListAuthzPoliciesResponse", + "UpdateAuthzPolicyRequest", + "BackendAuthenticationConfig", + "CreateBackendAuthenticationConfigRequest", + "DeleteBackendAuthenticationConfigRequest", + "GetBackendAuthenticationConfigRequest", + "ListBackendAuthenticationConfigsRequest", + "ListBackendAuthenticationConfigsResponse", + "UpdateBackendAuthenticationConfigRequest", "ClientTlsPolicy", "CreateClientTlsPolicyRequest", "DeleteClientTlsPolicyRequest", @@ -39,8 +280,177 @@ "ListClientTlsPoliciesResponse", "UpdateClientTlsPolicyRequest", "OperationMetadata", + "CreateDnsThreatDetectorRequest", + "DeleteDnsThreatDetectorRequest", + "DnsThreatDetector", + "GetDnsThreatDetectorRequest", + "ListDnsThreatDetectorsRequest", + "ListDnsThreatDetectorsResponse", + "UpdateDnsThreatDetectorRequest", + "CreateFirewallEndpointAssociationRequest", + "CreateFirewallEndpointRequest", + "DeleteFirewallEndpointAssociationRequest", + "DeleteFirewallEndpointRequest", + "FirewallEndpoint", + "FirewallEndpointAssociation", + "GetFirewallEndpointAssociationRequest", + "GetFirewallEndpointRequest", + "ListFirewallEndpointAssociationsRequest", + "ListFirewallEndpointAssociationsResponse", + "ListFirewallEndpointsRequest", + "ListFirewallEndpointsResponse", + "UpdateFirewallEndpointAssociationRequest", + "UpdateFirewallEndpointRequest", + "CreateGatewaySecurityPolicyRequest", + "DeleteGatewaySecurityPolicyRequest", + "GatewaySecurityPolicy", + "GetGatewaySecurityPolicyRequest", + "ListGatewaySecurityPoliciesRequest", + "ListGatewaySecurityPoliciesResponse", + "UpdateGatewaySecurityPolicyRequest", + "CreateGatewaySecurityPolicyRuleRequest", + "DeleteGatewaySecurityPolicyRuleRequest", + "GatewaySecurityPolicyRule", + "GetGatewaySecurityPolicyRuleRequest", + "ListGatewaySecurityPolicyRulesRequest", + "ListGatewaySecurityPolicyRulesResponse", + "UpdateGatewaySecurityPolicyRuleRequest", + "CreateInterceptDeploymentGroupRequest", + "CreateInterceptDeploymentRequest", + "CreateInterceptEndpointGroupAssociationRequest", + "CreateInterceptEndpointGroupRequest", + "DeleteInterceptDeploymentGroupRequest", + "DeleteInterceptDeploymentRequest", + "DeleteInterceptEndpointGroupAssociationRequest", + "DeleteInterceptEndpointGroupRequest", + "GetInterceptDeploymentGroupRequest", + "GetInterceptDeploymentRequest", + "GetInterceptEndpointGroupAssociationRequest", + "GetInterceptEndpointGroupRequest", + "InterceptDeployment", + "InterceptDeploymentGroup", + "InterceptEndpointGroup", + "InterceptEndpointGroupAssociation", + "InterceptLocation", + "ListInterceptDeploymentGroupsRequest", + "ListInterceptDeploymentGroupsResponse", + "ListInterceptDeploymentsRequest", + "ListInterceptDeploymentsResponse", + "ListInterceptEndpointGroupAssociationsRequest", + "ListInterceptEndpointGroupAssociationsResponse", + "ListInterceptEndpointGroupsRequest", + "ListInterceptEndpointGroupsResponse", + 
"UpdateInterceptDeploymentGroupRequest", + "UpdateInterceptDeploymentRequest", + "UpdateInterceptEndpointGroupAssociationRequest", + "UpdateInterceptEndpointGroupRequest", + "CreateMirroringDeploymentGroupRequest", + "CreateMirroringDeploymentRequest", + "CreateMirroringEndpointGroupAssociationRequest", + "CreateMirroringEndpointGroupRequest", + "DeleteMirroringDeploymentGroupRequest", + "DeleteMirroringDeploymentRequest", + "DeleteMirroringEndpointGroupAssociationRequest", + "DeleteMirroringEndpointGroupRequest", + "GetMirroringDeploymentGroupRequest", + "GetMirroringDeploymentRequest", + "GetMirroringEndpointGroupAssociationRequest", + "GetMirroringEndpointGroupRequest", + "ListMirroringDeploymentGroupsRequest", + "ListMirroringDeploymentGroupsResponse", + "ListMirroringDeploymentsRequest", + "ListMirroringDeploymentsResponse", + "ListMirroringEndpointGroupAssociationsRequest", + "ListMirroringEndpointGroupAssociationsResponse", + "ListMirroringEndpointGroupsRequest", + "ListMirroringEndpointGroupsResponse", + "MirroringDeployment", + "MirroringDeploymentGroup", + "MirroringEndpointGroup", + "MirroringEndpointGroupAssociation", + "MirroringLocation", + "UpdateMirroringDeploymentGroupRequest", + "UpdateMirroringDeploymentRequest", + "UpdateMirroringEndpointGroupAssociationRequest", + "UpdateMirroringEndpointGroupRequest", + "SecurityProfile", + "SecurityProfileGroup", + "CustomInterceptProfile", + "CustomMirroringProfile", + "CreateSecurityProfileGroupRequest", + "CreateSecurityProfileRequest", + "DeleteSecurityProfileGroupRequest", + "DeleteSecurityProfileRequest", + "GetSecurityProfileGroupRequest", + "GetSecurityProfileRequest", + "ListSecurityProfileGroupsRequest", + "ListSecurityProfileGroupsResponse", + "ListSecurityProfilesRequest", + "ListSecurityProfilesResponse", + "UpdateSecurityProfileGroupRequest", + "UpdateSecurityProfileRequest", + "AntivirusOverride", + "SeverityOverride", + "ThreatOverride", + "ThreatPreventionProfile", + "Protocol", + "Severity", + "ThreatAction", + "ThreatType", + "UrlFilter", + "UrlFilteringProfile", + "CreateServerTlsPolicyRequest", + "DeleteServerTlsPolicyRequest", + "GetServerTlsPolicyRequest", + "ListServerTlsPoliciesRequest", + "ListServerTlsPoliciesResponse", + "ServerTlsPolicy", + "UpdateServerTlsPolicyRequest", + "CreatePartnerSSEGatewayRequest", + "DeletePartnerSSEGatewayRequest", + "GetPartnerSSEGatewayRequest", + "GetSSEGatewayReferenceRequest", + "ListPartnerSSEGatewaysRequest", + "ListPartnerSSEGatewaysResponse", + "ListSSEGatewayReferencesRequest", + "ListSSEGatewayReferencesResponse", + "PartnerSSEGateway", + "SSEGatewayReference", + "UpdatePartnerSSEGatewayRequest", + "CreatePartnerSSERealmRequest", + "CreateSACAttachmentRequest", + "CreateSACRealmRequest", + "DeletePartnerSSERealmRequest", + "DeleteSACAttachmentRequest", + "DeleteSACRealmRequest", + "GetPartnerSSERealmRequest", + "GetSACAttachmentRequest", + "GetSACRealmRequest", + "ListPartnerSSERealmsRequest", + "ListPartnerSSERealmsResponse", + "ListSACAttachmentsRequest", + "ListSACAttachmentsResponse", + "ListSACRealmsRequest", + "ListSACRealmsResponse", + "PartnerSSERealm", + "SACAttachment", + "SACRealm", "CertificateProvider", "CertificateProviderInstance", "GrpcEndpoint", "ValidationCA", + "CreateTlsInspectionPolicyRequest", + "DeleteTlsInspectionPolicyRequest", + "GetTlsInspectionPolicyRequest", + "ListTlsInspectionPoliciesRequest", + "ListTlsInspectionPoliciesResponse", + "TlsInspectionPolicy", + "UpdateTlsInspectionPolicyRequest", + "CreateUrlListRequest", + 
"DeleteUrlListRequest", + "GetUrlListRequest", + "ListUrlListsRequest", + "ListUrlListsResponse", + "UpdateUrlListRequest", + "UrlList", ) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/authorization_policy.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/authorization_policy.py new file mode 100644 index 000000000000..c73b8366f9d5 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/authorization_policy.py @@ -0,0 +1,430 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "AuthorizationPolicy", + "ListAuthorizationPoliciesRequest", + "ListAuthorizationPoliciesResponse", + "GetAuthorizationPolicyRequest", + "CreateAuthorizationPolicyRequest", + "UpdateAuthorizationPolicyRequest", + "DeleteAuthorizationPolicyRequest", + }, +) + + +class AuthorizationPolicy(proto.Message): + r"""AuthorizationPolicy is a resource that specifies how a server + should authorize incoming connections. This resource in itself + does not change the configuration unless it's attached to a + target https proxy or endpoint config selector resource. + + Attributes: + name (str): + Required. Name of the AuthorizationPolicy resource. It + matches pattern + ``projects/{project}/locations/{location}/authorizationPolicies/``. + description (str): + Optional. Free-text description of the + resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was updated. + labels (MutableMapping[str, str]): + Optional. Set of label tags associated with + the AuthorizationPolicy resource. + action (google.cloud.network_security_v1alpha1.types.AuthorizationPolicy.Action): + Required. The action to take when a rule + match is found. Possible values are "ALLOW" or + "DENY". + rules (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthorizationPolicy.Rule]): + Optional. List of rules to match. Note that at least one of + the rules must match in order for the action specified in + the 'action' field to be taken. A rule is a match if there + is a matching source and destination. If left blank, the + action specified in the ``action`` field will be applied on + every request. + """ + + class Action(proto.Enum): + r"""Possible values that define what action to take. + + Values: + ACTION_UNSPECIFIED (0): + Default value. + ALLOW (1): + Grant access. + DENY (2): + Deny access. 
+ Deny rules should be avoided unless they are + used to provide a default "deny all" fallback. + """ + ACTION_UNSPECIFIED = 0 + ALLOW = 1 + DENY = 2 + + class Rule(proto.Message): + r"""Specification of rules. + + Attributes: + sources (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthorizationPolicy.Rule.Source]): + Optional. List of attributes for the traffic source. All of + the sources must match. A source is a match if both + principals and ip_blocks match. If not set, the action + specified in the 'action' field will be applied without any + rule checks for the source. + destinations (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthorizationPolicy.Rule.Destination]): + Optional. List of attributes for the traffic + destination. All of the destinations must match. + A destination is a match if a request matches + all the specified hosts, ports, methods and + headers. If not set, the action specified in the + 'action' field will be applied without any rule + checks for the destination. + """ + + class Source(proto.Message): + r"""Specification of traffic source attributes. + + Attributes: + principals (MutableSequence[str]): + Optional. List of peer identities to match for + authorization. At least one principal should match. Each + peer can be an exact match, or a prefix match (example, + "namespace/*") or a suffix match (example, + "*/service-account") or a presence match "\*". Authorization + based on the principal name without certificate validation + (configured by ServerTlsPolicy resource) is considered + insecure. + ip_blocks (MutableSequence[str]): + Optional. List of CIDR ranges to match based + on source IP address. At least one IP block + should match. Single IP (e.g., "1.2.3.4") and + CIDR (e.g., "1.2.3.0/24") are supported. + Authorization based on source IP alone should be + avoided. The IP addresses of any load balancers + or proxies should be considered untrusted. + """ + + principals: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + ip_blocks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + class Destination(proto.Message): + r"""Specification of traffic destination attributes. + + Attributes: + hosts (MutableSequence[str]): + Required. List of host names to match. Matched against the + ":authority" header in http requests. At least one host + should match. Each host can be an exact match, or a prefix + match (example "mydomain.\ *") or a suffix match (example + "*.myorg.com") or a presence (any) match "\*". + ports (MutableSequence[int]): + Required. List of destination ports to match. + At least one port should match. + methods (MutableSequence[str]): + Optional. A list of HTTP methods to match. At + least one method should match. Should not be set + for gRPC services. + http_header_match (google.cloud.network_security_v1alpha1.types.AuthorizationPolicy.Rule.Destination.HttpHeaderMatch): + Optional. Match against key:value pair in + http header. Provides a flexible match based on + HTTP headers, for potentially advanced use + cases. At least one header should match. Avoid + using header matches to make authorization + decisions unless there is a strong guarantee + that requests arrive through a trusted client or + proxy. + """ + + class HttpHeaderMatch(proto.Message): + r"""Specification of HTTP header match attributes. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + regex_match (str): + Required. The value of the header must match + the regular expression specified in regexMatch. + For regular expression grammar, please see: + en.cppreference.com/w/cpp/regex/ecmascript For + matching against a port specified in the HTTP + request, use a headerMatch with headerName set + to Host and a regular expression that satisfies + the RFC2616 Host header's port specifier. + + This field is a member of `oneof`_ ``type``. + header_name (str): + Required. The name of the HTTP header to + match. For matching against the HTTP request's + authority, use a headerMatch with the header + name ":authority". For matching a request's + method, use the headerName ":method". + """ + + regex_match: str = proto.Field( + proto.STRING, + number=2, + oneof="type", + ) + header_name: str = proto.Field( + proto.STRING, + number=1, + ) + + hosts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + ports: MutableSequence[int] = proto.RepeatedField( + proto.UINT32, + number=2, + ) + methods: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + http_header_match: "AuthorizationPolicy.Rule.Destination.HttpHeaderMatch" = proto.Field( + proto.MESSAGE, + number=5, + message="AuthorizationPolicy.Rule.Destination.HttpHeaderMatch", + ) + + sources: MutableSequence[ + "AuthorizationPolicy.Rule.Source" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AuthorizationPolicy.Rule.Source", + ) + destinations: MutableSequence[ + "AuthorizationPolicy.Rule.Destination" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="AuthorizationPolicy.Rule.Destination", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + action: Action = proto.Field( + proto.ENUM, + number=6, + enum=Action, + ) + rules: MutableSequence[Rule] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=Rule, + ) + + +class ListAuthorizationPoliciesRequest(proto.Message): + r"""Request used with the ListAuthorizationPolicies method. + + Attributes: + parent (str): + Required. The project and location from which the + AuthorizationPolicies should be listed, specified in the + format ``projects/{project}/locations/{location}``. + page_size (int): + Maximum number of AuthorizationPolicies to + return per call. + page_token (str): + The value returned by the last + ``ListAuthorizationPoliciesResponse`` Indicates that this is + a continuation of a prior ``ListAuthorizationPolicies`` + call, and that the system should return the next page of + data. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAuthorizationPoliciesResponse(proto.Message): + r"""Response returned by the ListAuthorizationPolicies method. 
+ + Attributes: + authorization_policies (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthorizationPolicy]): + List of AuthorizationPolicies resources. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``next_page_token`` is included. To get the + next set of results, call this method again using the value + of ``next_page_token`` as ``page_token``. + """ + + @property + def raw_page(self): + return self + + authorization_policies: MutableSequence[ + "AuthorizationPolicy" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AuthorizationPolicy", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetAuthorizationPolicyRequest(proto.Message): + r"""Request used by the GetAuthorizationPolicy method. + + Attributes: + name (str): + Required. A name of the AuthorizationPolicy to get. Must be + in the format + ``projects/{project}/locations/{location}/authorizationPolicies/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateAuthorizationPolicyRequest(proto.Message): + r"""Request used by the CreateAuthorizationPolicy method. + + Attributes: + parent (str): + Required. The parent resource of the AuthorizationPolicy. + Must be in the format + ``projects/{project}/locations/{location}``. + authorization_policy_id (str): + Required. Short name of the AuthorizationPolicy resource to + be created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and underscores, + and should not start with a number. E.g. "authz_policy". + authorization_policy (google.cloud.network_security_v1alpha1.types.AuthorizationPolicy): + Required. AuthorizationPolicy resource to be + created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + authorization_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + authorization_policy: "AuthorizationPolicy" = proto.Field( + proto.MESSAGE, + number=3, + message="AuthorizationPolicy", + ) + + +class UpdateAuthorizationPolicyRequest(proto.Message): + r"""Request used by the UpdateAuthorizationPolicy method. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the AuthorizationPolicy resource by the + update. The fields specified in the update_mask are relative + to the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + authorization_policy (google.cloud.network_security_v1alpha1.types.AuthorizationPolicy): + Required. Updated AuthorizationPolicy + resource. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + authorization_policy: "AuthorizationPolicy" = proto.Field( + proto.MESSAGE, + number=2, + message="AuthorizationPolicy", + ) + + +class DeleteAuthorizationPolicyRequest(proto.Message): + r"""Request used by the DeleteAuthorizationPolicy method. + + Attributes: + name (str): + Required. A name of the AuthorizationPolicy to delete. Must + be in the format + ``projects/{project}/locations/{location}/authorizationPolicies/*``. 
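# A minimal sketch (hypothetical project, location, and resource IDs) of building the
# AuthorizationPolicy message defined above and wrapping it in a CreateAuthorizationPolicyRequest.
# A rule matches when any source and any destination match; with action=ALLOW, only matching
# requests are granted access.
from google.cloud.network_security_v1alpha1 import types

policy = types.AuthorizationPolicy(
    name="projects/my-project/locations/global/authorizationPolicies/authz_policy",
    description="Allow GETs from the frontend namespace.",
    action=types.AuthorizationPolicy.Action.ALLOW,
    rules=[
        types.AuthorizationPolicy.Rule(
            sources=[
                types.AuthorizationPolicy.Rule.Source(
                    principals=["frontend-namespace/*"],
                    ip_blocks=["10.0.0.0/24"],
                )
            ],
            destinations=[
                types.AuthorizationPolicy.Rule.Destination(
                    hosts=["*.myorg.com"],
                    ports=[443],
                    methods=["GET"],
                )
            ],
        )
    ],
)
create_request = types.CreateAuthorizationPolicyRequest(
    parent="projects/my-project/locations/global",
    authorization_policy_id="authz_policy",
    authorization_policy=policy,
)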
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/authz_policy.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/authz_policy.py new file mode 100644 index 000000000000..d64b78a16ef4 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/authz_policy.py @@ -0,0 +1,1009 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "AuthzPolicy", + "CreateAuthzPolicyRequest", + "ListAuthzPoliciesRequest", + "ListAuthzPoliciesResponse", + "GetAuthzPolicyRequest", + "UpdateAuthzPolicyRequest", + "DeleteAuthzPolicyRequest", + }, +) + + +class AuthzPolicy(proto.Message): + r"""``AuthzPolicy`` is a resource that allows to forward traffic to a + callout backend designed to scan the traffic for security purposes. + + Attributes: + name (str): + Required. Identifier. Name of the ``AuthzPolicy`` resource + in the following format: + ``projects/{project}/locations/{location}/authzPolicies/{authz_policy}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was updated. + description (str): + Optional. A human-readable description of the + resource. + labels (MutableMapping[str, str]): + Optional. Set of labels associated with the ``AuthzPolicy`` + resource. + + The format must comply with `the following + requirements `__. + target (google.cloud.network_security_v1alpha1.types.AuthzPolicy.Target): + Required. Specifies the set of resources to + which this policy should be applied to. + http_rules (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule]): + Optional. A list of authorization HTTP rules + to match against the incoming request. A policy + match occurs when at least one HTTP rule matches + the request or when no HTTP rules are specified + in the policy. At least one HTTP Rule is + required for Allow or Deny Action. Limited to 5 + rules. + action (google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzAction): + Required. Can be one of ``ALLOW``, ``DENY``, ``CUSTOM``. + + When the action is ``CUSTOM``, ``customProvider`` must be + specified. + + When the action is ``ALLOW``, only requests matching the + policy will be allowed. + + When the action is ``DENY``, only requests matching the + policy will be denied. 
+ + When a request arrives, the policies are evaluated in the + following order: + + 1. If there is a ``CUSTOM`` policy that matches the request, + the ``CUSTOM`` policy is evaluated using the custom + authorization providers and the request is denied if the + provider rejects the request. + + 2. If there are any ``DENY`` policies that match the + request, the request is denied. + + 3. If there are no ``ALLOW`` policies for the resource or if + any of the ``ALLOW`` policies match the request, the + request is allowed. + + 4. Else the request is denied by default if none of the + configured AuthzPolicies with ``ALLOW`` action match the + request. + custom_provider (google.cloud.network_security_v1alpha1.types.AuthzPolicy.CustomProvider): + Optional. Required if the action is ``CUSTOM``. Allows + delegating authorization decisions to Cloud IAP or to + Service Extensions. One of ``cloudIap`` or + ``authzExtension`` must be specified. + """ + + class LoadBalancingScheme(proto.Enum): + r"""Load balancing schemes supported by the ``AuthzPolicy`` resource. + The valid values are ``INTERNAL_MANAGED`` and ``EXTERNAL_MANAGED``. + For more information, refer to `Backend services + overview `__. + + Values: + LOAD_BALANCING_SCHEME_UNSPECIFIED (0): + Default value. Do not use. + INTERNAL_MANAGED (1): + Signifies that this is used for Regional + internal or Cross-region internal Application + Load Balancing. + EXTERNAL_MANAGED (2): + Signifies that this is used for Global + external or Regional external Application Load + Balancing. + INTERNAL_SELF_MANAGED (3): + Signifies that this is used for Cloud Service + Mesh. Meant for use by CSM GKE controller only. + """ + LOAD_BALANCING_SCHEME_UNSPECIFIED = 0 + INTERNAL_MANAGED = 1 + EXTERNAL_MANAGED = 2 + INTERNAL_SELF_MANAGED = 3 + + class AuthzAction(proto.Enum): + r"""The action to be applied to this policy. Valid values are ``ALLOW``, + ``DENY``, ``CUSTOM``. + + Values: + AUTHZ_ACTION_UNSPECIFIED (0): + Unspecified action. + ALLOW (1): + Allow request to pass through to the backend. + DENY (2): + Deny the request and return a HTTP 404 to the + client. + CUSTOM (3): + Delegate the authorization decision to an + external authorization engine. + """ + AUTHZ_ACTION_UNSPECIFIED = 0 + ALLOW = 1 + DENY = 2 + CUSTOM = 3 + + class Target(proto.Message): + r"""Specifies the set of targets to which this policy should be + applied to. + + Attributes: + load_balancing_scheme (google.cloud.network_security_v1alpha1.types.AuthzPolicy.LoadBalancingScheme): + Required. All gateways and forwarding rules referenced by + this policy and extensions must share the same load + balancing scheme. Supported values: ``INTERNAL_MANAGED`` and + ``EXTERNAL_MANAGED``. For more information, refer to + `Backend services + overview `__. + resources (MutableSequence[str]): + Required. A list of references to the + Forwarding Rules on which this policy will be + applied. + """ + + load_balancing_scheme: "AuthzPolicy.LoadBalancingScheme" = proto.Field( + proto.ENUM, + number=8, + enum="AuthzPolicy.LoadBalancingScheme", + ) + resources: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class AuthzRule(proto.Message): + r"""Conditions to match against the incoming request. + + Attributes: + from_ (google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.From): + Optional. Describes properties of a source of + a request. + to (google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.To): + Optional. 
Describes properties of a target of + a request. + when (str): + Optional. CEL expression that describes the + conditions to be satisfied for the action. The + result of the CEL expression is ANDed with the + from and to. Refer to the CEL language reference + for a list of available attributes. + """ + + class StringMatch(proto.Message): + r"""Determines how a string value should be matched. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + exact (str): + The input string must match exactly the string specified + here. + + Examples: + + - ``abc`` only matches the value ``abc``. + + This field is a member of `oneof`_ ``match_pattern``. + prefix (str): + The input string must have the prefix specified here. Note: + empty prefix is not allowed, please use regex instead. + + Examples: + + - ``abc`` matches the value ``abc.xyz`` + + This field is a member of `oneof`_ ``match_pattern``. + suffix (str): + The input string must have the suffix specified here. Note: + empty prefix is not allowed, please use regex instead. + + Examples: + + - ``abc`` matches the value ``xyz.abc`` + + This field is a member of `oneof`_ ``match_pattern``. + contains (str): + The input string must have the substring specified here. + Note: empty contains match is not allowed, please use regex + instead. + + Examples: + + - ``abc`` matches the value ``xyz.abc.def`` + + This field is a member of `oneof`_ ``match_pattern``. + ignore_case (bool): + If true, indicates the exact/prefix/suffix/contains matching + should be case insensitive. For example, the matcher + ``data`` will match both input string ``Data`` and ``data`` + if set to true. + """ + + exact: str = proto.Field( + proto.STRING, + number=1, + oneof="match_pattern", + ) + prefix: str = proto.Field( + proto.STRING, + number=2, + oneof="match_pattern", + ) + suffix: str = proto.Field( + proto.STRING, + number=3, + oneof="match_pattern", + ) + contains: str = proto.Field( + proto.STRING, + number=4, + oneof="match_pattern", + ) + ignore_case: bool = proto.Field( + proto.BOOL, + number=5, + ) + + class IpBlock(proto.Message): + r"""Represents a range of IP Addresses. + + Attributes: + prefix (str): + Required. The address prefix. + length (int): + Required. The length of the address range. + """ + + prefix: str = proto.Field( + proto.STRING, + number=1, + ) + length: int = proto.Field( + proto.INT32, + number=2, + ) + + class RequestResource(proto.Message): + r"""Describes the properties of a client VM resource accessing + the internal application load balancers. + + Attributes: + tag_value_id_set (google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.RequestResource.TagValueIdSet): + Optional. A list of resource tag value + permanent IDs to match against the resource + manager tags value associated with the source VM + of a request. + iam_service_account (google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.StringMatch): + Optional. An IAM service account to match + against the source service account of the VM + sending the request. + """ + + class TagValueIdSet(proto.Message): + r"""Describes a set of resource tag value permanent IDs to match + against the resource manager tags value associated with the + source VM of a request. 
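# A small sketch of the StringMatch ``match_pattern`` oneof described above: exact, prefix,
# suffix, and contains are mutually exclusive, and ignore_case applies to whichever is set.
# The values here are illustrative only.
from google.cloud.network_security_v1alpha1 import types

matcher = types.AuthzPolicy.AuthzRule.StringMatch(
    suffix=".internal.example.com",
    ignore_case=True,
)
# Assigning another member of the oneof clears the previously set one.
matcher.prefix = "api."
assert matcher.prefix == "api." and not matcher.suffix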
+ + Attributes: + ids (MutableSequence[int]): + Required. A list of resource tag value + permanent IDs to match against the resource + manager tags value associated with the source VM + of a request. The match follows AND semantics + which means all the ids must match. Limited to 5 + ids in the Tag value id set. + """ + + ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=1, + ) + + tag_value_id_set: "AuthzPolicy.AuthzRule.RequestResource.TagValueIdSet" = ( + proto.Field( + proto.MESSAGE, + number=1, + message="AuthzPolicy.AuthzRule.RequestResource.TagValueIdSet", + ) + ) + iam_service_account: "AuthzPolicy.AuthzRule.StringMatch" = proto.Field( + proto.MESSAGE, + number=2, + message="AuthzPolicy.AuthzRule.StringMatch", + ) + + class HeaderMatch(proto.Message): + r"""Determines how a HTTP header should be matched. + + Attributes: + name (str): + Optional. Specifies the name of the header in + the request. + value (google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.StringMatch): + Optional. Specifies how the header match will + be performed. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + value: "AuthzPolicy.AuthzRule.StringMatch" = proto.Field( + proto.MESSAGE, + number=2, + message="AuthzPolicy.AuthzRule.StringMatch", + ) + + class Principal(proto.Message): + r"""Describes the properties of a principal to be matched + against. + + Attributes: + principal_selector (google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.Principal.PrincipalSelector): + Optional. An enum to decide what principal value the + principal rule will match against. If not specified, the + PrincipalSelector is CLIENT_CERT_URI_SAN. + principal (google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.StringMatch): + Required. A non-empty string whose value is matched against + the principal value based on the principal_selector. Only + exact match can be applied for CLIENT_CERT_URI_SAN, + CLIENT_CERT_DNS_NAME_SAN, CLIENT_CERT_COMMON_NAME selectors. + """ + + class PrincipalSelector(proto.Enum): + r"""The principal value the principal rule will match against. + + Values: + PRINCIPAL_SELECTOR_UNSPECIFIED (0): + Unspecified principal selector. It will be treated as + CLIENT_CERT_URI_SAN by default. + CLIENT_CERT_URI_SAN (1): + The principal rule is matched against a list + of URI SANs in the validated client's + certificate. A match happens when there is any + exact URI SAN value match. This is the default + principal selector. + CLIENT_CERT_DNS_NAME_SAN (2): + The principal rule is matched against a list of DNS Name + SANs in the validated client's certificate. A match happens + when there is any exact DNS Name SAN value match. This is + only applicable for Application Load Balancers except for + classic Global External Application load balancer. + CLIENT_CERT_DNS_NAME_SAN is not supported for + INTERNAL_SELF_MANAGED load balancing scheme. + CLIENT_CERT_COMMON_NAME (3): + The principal rule is matched against the common name in the + client's certificate. Authorization against multiple common + names in the client certificate is not supported. Requests + with multiple common names in the client certificate will be + rejected if CLIENT_CERT_COMMON_NAME is set as the principal + selector. A match happens when there is an exact common name + value match. This is only applicable for Application Load + Balancers except for global external Application Load + Balancer and classic Application Load Balancer. 
+ CLIENT_CERT_COMMON_NAME is not supported for + INTERNAL_SELF_MANAGED load balancing scheme. + """ + PRINCIPAL_SELECTOR_UNSPECIFIED = 0 + CLIENT_CERT_URI_SAN = 1 + CLIENT_CERT_DNS_NAME_SAN = 2 + CLIENT_CERT_COMMON_NAME = 3 + + principal_selector: "AuthzPolicy.AuthzRule.Principal.PrincipalSelector" = ( + proto.Field( + proto.ENUM, + number=1, + enum="AuthzPolicy.AuthzRule.Principal.PrincipalSelector", + ) + ) + principal: "AuthzPolicy.AuthzRule.StringMatch" = proto.Field( + proto.MESSAGE, + number=2, + message="AuthzPolicy.AuthzRule.StringMatch", + ) + + class From(proto.Message): + r"""Describes properties of one or more sources of a request. + + Attributes: + sources (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.From.RequestSource]): + Optional. Describes the properties of a + request's sources. At least one of sources or + notSources must be specified. Limited to 1 + source. A match occurs when ANY source (in + sources or notSources) matches the request. + Within a single source, the match follows AND + semantics across fields and OR semantics within + a single field, i.e. a match occurs when ANY + principal matches AND ANY ipBlocks match. + not_sources (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.From.RequestSource]): + Optional. Describes the negated properties of + request sources. Matches requests from sources + that do not match the criteria specified in this + field. At least one of sources or notSources + must be specified. + """ + + class RequestSource(proto.Message): + r"""Describes the properties of a single source. + + Attributes: + principals (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.Principal]): + Optional. A list of identities derived from + the client's certificate. This field will not + match on a request unless frontend mutual TLS is + enabled for the forwarding rule or Gateway and + the client certificate has been successfully + validated by mTLS. + Each identity is a string whose value is matched + against a list of URI SANs, DNS Name SANs, or + the common name in the client's certificate. A + match happens when any principal matches with + the rule. Limited to 50 principals per + Authorization Policy for regional internal + Application Load Balancers, regional external + Application Load Balancers, cross-region + internal Application Load Balancers, and Cloud + Service Mesh. This field is not supported for + global external Application Load Balancers. + ip_blocks (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.IpBlock]): + Optional. A list of IP addresses or IP address ranges to + match against the source IP address of the request. Limited + to 10 ip_blocks per Authorization Policy + resources (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.RequestResource]): + Optional. A list of resources to match + against the resource of the source VM of a + request. Limited to 10 resources per + Authorization Policy. 
+ """ + + principals: MutableSequence[ + "AuthzPolicy.AuthzRule.Principal" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AuthzPolicy.AuthzRule.Principal", + ) + ip_blocks: MutableSequence[ + "AuthzPolicy.AuthzRule.IpBlock" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="AuthzPolicy.AuthzRule.IpBlock", + ) + resources: MutableSequence[ + "AuthzPolicy.AuthzRule.RequestResource" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="AuthzPolicy.AuthzRule.RequestResource", + ) + + sources: MutableSequence[ + "AuthzPolicy.AuthzRule.From.RequestSource" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AuthzPolicy.AuthzRule.From.RequestSource", + ) + not_sources: MutableSequence[ + "AuthzPolicy.AuthzRule.From.RequestSource" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="AuthzPolicy.AuthzRule.From.RequestSource", + ) + + class To(proto.Message): + r"""Describes properties of one or more targets of a request. + + Attributes: + operations (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.To.RequestOperation]): + Optional. Describes properties of one or more + targets of a request. At least one of operations + or notOperations must be specified. Limited to 1 + operation. A match occurs when ANY operation (in + operations or notOperations) matches. Within an + operation, the match follows AND semantics + across fields and OR semantics within a field, + i.e. a match occurs when ANY path matches AND + ANY header matches and ANY method matches. + not_operations (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.To.RequestOperation]): + Optional. Describes the negated properties of + the targets of a request. Matches requests for + operations that do not match the criteria + specified in this field. At least one of + operations or notOperations must be specified. + """ + + class RequestOperation(proto.Message): + r"""Describes properties of one or more targets of a request. + + Attributes: + header_set (google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.To.RequestOperation.HeaderSet): + Optional. A list of headers to match against + in http header. + hosts (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.StringMatch]): + Optional. A list of HTTP Hosts to match + against. The match can be one of exact, prefix, + suffix, or contains (substring match). Matches + are always case sensitive unless the ignoreCase + is set. Limited to 10 hosts per Authorization + Policy. + paths (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.StringMatch]): + Optional. A list of paths to match against. + The match can be one of exact, prefix, suffix, + or contains (substring match). Matches are + always case sensitive unless the ignoreCase is + set. Limited to 10 paths per Authorization + Policy. + Note that this path match includes the query + parameters. For gRPC services, this should be a + fully-qualified name of the form + /package.service/method. + methods (MutableSequence[str]): + Optional. A list of HTTP methods to match + against. Each entry must be a valid HTTP method + name (GET, PUT, POST, HEAD, PATCH, DELETE, + OPTIONS). It only allows exact match and is + always case sensitive. Limited to 10 methods per + Authorization Policy. + """ + + class HeaderSet(proto.Message): + r"""Describes a set of HTTP headers to match against. 
+ + Attributes: + headers (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy.AuthzRule.HeaderMatch]): + Required. A list of headers to match against + in http header. The match can be one of exact, + prefix, suffix, or contains (substring match). + The match follows AND semantics which means all + the headers must match. Matches are always case + sensitive unless the ignoreCase is set. Limited + to 10 headers per Authorization Policy. + """ + + headers: MutableSequence[ + "AuthzPolicy.AuthzRule.HeaderMatch" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AuthzPolicy.AuthzRule.HeaderMatch", + ) + + header_set: "AuthzPolicy.AuthzRule.To.RequestOperation.HeaderSet" = ( + proto.Field( + proto.MESSAGE, + number=1, + message="AuthzPolicy.AuthzRule.To.RequestOperation.HeaderSet", + ) + ) + hosts: MutableSequence[ + "AuthzPolicy.AuthzRule.StringMatch" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="AuthzPolicy.AuthzRule.StringMatch", + ) + paths: MutableSequence[ + "AuthzPolicy.AuthzRule.StringMatch" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="AuthzPolicy.AuthzRule.StringMatch", + ) + methods: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + operations: MutableSequence[ + "AuthzPolicy.AuthzRule.To.RequestOperation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AuthzPolicy.AuthzRule.To.RequestOperation", + ) + not_operations: MutableSequence[ + "AuthzPolicy.AuthzRule.To.RequestOperation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="AuthzPolicy.AuthzRule.To.RequestOperation", + ) + + from_: "AuthzPolicy.AuthzRule.From" = proto.Field( + proto.MESSAGE, + number=1, + message="AuthzPolicy.AuthzRule.From", + ) + to: "AuthzPolicy.AuthzRule.To" = proto.Field( + proto.MESSAGE, + number=2, + message="AuthzPolicy.AuthzRule.To", + ) + when: str = proto.Field( + proto.STRING, + number=3, + ) + + class CustomProvider(proto.Message): + r"""Allows delegating authorization decisions to Cloud IAP or to + Service Extensions. + + Attributes: + cloud_iap (google.cloud.network_security_v1alpha1.types.AuthzPolicy.CustomProvider.CloudIap): + Optional. Delegates authorization decisions + to Cloud IAP. Applicable only for managed load + balancers. Enabling Cloud IAP at the AuthzPolicy + level is not compatible with Cloud IAP settings + in the BackendService. Enabling IAP in both + places will result in request failure. Ensure + that IAP is enabled in either the AuthzPolicy or + the BackendService but not in both places. + authz_extension (google.cloud.network_security_v1alpha1.types.AuthzPolicy.CustomProvider.AuthzExtension): + Optional. Delegate authorization decision to + user authored Service Extension. Only one of + cloudIap or authzExtension can be specified. + """ + + class CloudIap(proto.Message): + r"""Optional. Delegates authorization decisions to Cloud IAP. + Applicable only for managed load balancers. Enabling Cloud IAP + at the AuthzPolicy level is not compatible with Cloud IAP + settings in the BackendService. Enabling IAP in both places will + result in request failure. Ensure that IAP is enabled in either + the AuthzPolicy or the BackendService but not in both places. + + """ + + class AuthzExtension(proto.Message): + r"""Optional. Delegate authorization decision to user authored + extension. Only one of cloudIap or authzExtension can be + specified. + + Attributes: + resources (MutableSequence[str]): + Required. 
A list of references to + authorization extensions that will be invoked + for requests matching this policy. Limited to 1 + custom provider. + """ + + resources: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + cloud_iap: "AuthzPolicy.CustomProvider.CloudIap" = proto.Field( + proto.MESSAGE, + number=1, + message="AuthzPolicy.CustomProvider.CloudIap", + ) + authz_extension: "AuthzPolicy.CustomProvider.AuthzExtension" = proto.Field( + proto.MESSAGE, + number=2, + message="AuthzPolicy.CustomProvider.AuthzExtension", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + target: Target = proto.Field( + proto.MESSAGE, + number=6, + message=Target, + ) + http_rules: MutableSequence[AuthzRule] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=AuthzRule, + ) + action: AuthzAction = proto.Field( + proto.ENUM, + number=8, + enum=AuthzAction, + ) + custom_provider: CustomProvider = proto.Field( + proto.MESSAGE, + number=10, + message=CustomProvider, + ) + + +class CreateAuthzPolicyRequest(proto.Message): + r"""Message for creating an ``AuthzPolicy`` resource. + + Attributes: + parent (str): + Required. The parent resource of the ``AuthzPolicy`` + resource. Must be in the format + ``projects/{project}/locations/{location}``. + authz_policy_id (str): + Required. User-provided ID of the ``AuthzPolicy`` resource + to be created. + authz_policy (google.cloud.network_security_v1alpha1.types.AuthzPolicy): + Required. ``AuthzPolicy`` resource to be created. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server can + ignore the request if it has already been + completed. The server guarantees that for at + least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + authz_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + authz_policy: "AuthzPolicy" = proto.Field( + proto.MESSAGE, + number=3, + message="AuthzPolicy", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListAuthzPoliciesRequest(proto.Message): + r"""Message for requesting list of ``AuthzPolicy`` resources. + + Attributes: + parent (str): + Required. The project and location from which the + ``AuthzPolicy`` resources are listed, specified in the + following format: + ``projects/{project}/locations/{location}``. + page_size (int): + Optional. Requested page size. The server + might return fewer items than requested. If + unspecified, the server picks an appropriate + default. + page_token (str): + Optional. 
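# A minimal sketch (hypothetical project, forwarding rule, and IDs) assembling the AuthzPolicy
# described above: an INTERNAL_MANAGED target, a single ALLOW rule limited to GET requests on
# hosts under .myorg.com, and a CreateAuthzPolicyRequest carrying a client-generated request_id
# so the server can ignore accidental retries. The optional source constraints (``from_``) and
# ``custom_provider`` are omitted for brevity.
import uuid

from google.cloud.network_security_v1alpha1 import types

authz_policy = types.AuthzPolicy(
    name="projects/my-project/locations/us-central1/authzPolicies/my-authz-policy",
    action=types.AuthzPolicy.AuthzAction.ALLOW,
    target=types.AuthzPolicy.Target(
        load_balancing_scheme=types.AuthzPolicy.LoadBalancingScheme.INTERNAL_MANAGED,
        resources=[
            # Hypothetical forwarding rule reference.
            "projects/my-project/regions/us-central1/forwardingRules/my-forwarding-rule",
        ],
    ),
    http_rules=[
        types.AuthzPolicy.AuthzRule(
            to=types.AuthzPolicy.AuthzRule.To(
                operations=[
                    types.AuthzPolicy.AuthzRule.To.RequestOperation(
                        hosts=[
                            types.AuthzPolicy.AuthzRule.StringMatch(suffix=".myorg.com"),
                        ],
                        methods=["GET"],
                    )
                ],
            ),
        )
    ],
)
create_request = types.CreateAuthzPolicyRequest(
    parent="projects/my-project/locations/us-central1",
    authz_policy_id="my-authz-policy",
    authz_policy=authz_policy,
    request_id=str(uuid.uuid4()),
)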
A token identifying a page of + results that the server returns. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListAuthzPoliciesResponse(proto.Message): + r"""Message for response to listing ``AuthzPolicy`` resources. + + Attributes: + authz_policies (MutableSequence[google.cloud.network_security_v1alpha1.types.AuthzPolicy]): + The list of ``AuthzPolicy`` resources. + next_page_token (str): + A token identifying a page of results that + the server returns. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + authz_policies: MutableSequence["AuthzPolicy"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AuthzPolicy", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetAuthzPolicyRequest(proto.Message): + r"""Message for getting a ``AuthzPolicy`` resource. + + Attributes: + name (str): + Required. A name of the ``AuthzPolicy`` resource to get. + Must be in the format + ``projects/{project}/locations/{location}/authzPolicies/{authz_policy}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateAuthzPolicyRequest(proto.Message): + r"""Message for updating an ``AuthzPolicy`` resource. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Used to specify the fields to be overwritten in + the ``AuthzPolicy`` resource by the update. The fields + specified in the ``update_mask`` are relative to the + resource, not the full request. A field is overwritten if it + is in the mask. If the user does not specify a mask, then + all fields are overwritten. + authz_policy (google.cloud.network_security_v1alpha1.types.AuthzPolicy): + Required. ``AuthzPolicy`` resource being updated. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server can + ignore the request if it has already been + completed. The server guarantees that for at + least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + authz_policy: "AuthzPolicy" = proto.Field( + proto.MESSAGE, + number=2, + message="AuthzPolicy", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteAuthzPolicyRequest(proto.Message): + r"""Message for deleting an ``AuthzPolicy`` resource. + + Attributes: + name (str): + Required. 
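# A short sketch of the update_mask semantics described above: only the fields named in the
# mask are overwritten on the existing ``AuthzPolicy``. The policy name and mask paths are
# illustrative.
import uuid

from google.protobuf import field_mask_pb2

from google.cloud.network_security_v1alpha1 import types

update_request = types.UpdateAuthzPolicyRequest(
    update_mask=field_mask_pb2.FieldMask(paths=["description", "http_rules"]),
    authz_policy=types.AuthzPolicy(
        name="projects/my-project/locations/us-central1/authzPolicies/my-authz-policy",
        description="Tightened rules after review.",
    ),
    request_id=str(uuid.uuid4()),
)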
The name of the ``AuthzPolicy`` resource to + delete. Must be in the format + ``projects/{project}/locations/{location}/authzPolicies/{authz_policy}``. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server can + ignore the request if it has already been + completed. The server guarantees that for at + least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, ignores the second request. This prevents + clients from accidentally creating duplicate + commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/backend_authentication_config.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/backend_authentication_config.py new file mode 100644 index 000000000000..91b8f7932e4b --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/backend_authentication_config.py @@ -0,0 +1,345 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "BackendAuthenticationConfig", + "ListBackendAuthenticationConfigsRequest", + "ListBackendAuthenticationConfigsResponse", + "GetBackendAuthenticationConfigRequest", + "CreateBackendAuthenticationConfigRequest", + "UpdateBackendAuthenticationConfigRequest", + "DeleteBackendAuthenticationConfigRequest", + }, +) + + +class BackendAuthenticationConfig(proto.Message): + r"""BackendAuthenticationConfig message groups the TrustConfig together + with other settings that control how the load balancer + authenticates, and expresses its identity to, the backend: + + - ``trustConfig`` is the attached TrustConfig. + + - ``wellKnownRoots`` indicates whether the load balance should trust + backend server certificates that are issued by public certificate + authorities, in addition to certificates trusted by the + TrustConfig. + + - ``clientCertificate`` is a client certificate that the load + balancer uses to express its identity to the backend, if the + connection to the backend uses mTLS. 
+ + You can attach the BackendAuthenticationConfig to the load + balancer's BackendService directly determining how that + BackendService negotiates TLS. + + Attributes: + name (str): + Required. Name of the BackendAuthenticationConfig resource. + It matches the pattern + ``projects/*/locations/{location}/backendAuthenticationConfigs/{backend_authentication_config}`` + description (str): + Optional. Free-text description of the + resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was updated. + labels (MutableMapping[str, str]): + Set of label tags associated with the + resource. + client_certificate (str): + Optional. A reference to a + certificatemanager.googleapis.com.Certificate resource. This + is a relative resource path following the form + "projects/{project}/locations/{location}/certificates/{certificate}". + + Used by a BackendService to negotiate mTLS when the backend + connection uses TLS and the backend requests a client + certificate. Must have a CLIENT_AUTH scope. + trust_config (str): + Optional. A reference to a TrustConfig resource from the + certificatemanager.googleapis.com namespace. This is a + relative resource path following the form + "projects/{project}/locations/{location}/trustConfigs/{trust_config}". + + A BackendService uses the chain of trust represented by this + TrustConfig, if specified, to validate the server + certificates presented by the backend. Required unless + wellKnownRoots is set to PUBLIC_ROOTS. + well_known_roots (google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig.WellKnownRoots): + Well known roots to use for server + certificate validation. + etag (str): + Output only. Etag of the resource. + """ + + class WellKnownRoots(proto.Enum): + r"""Enum to specify the well known roots to use for server + certificate validation. + + Values: + WELL_KNOWN_ROOTS_UNSPECIFIED (0): + Equivalent to NONE. + NONE (1): + The BackendService will only validate server + certificates against roots specified in + TrustConfig. + PUBLIC_ROOTS (2): + The BackendService uses a set of well-known + public roots, in addition to any roots specified + in the trustConfig field, when validating the + server certificates presented by the backend. + Validation with these roots is only considered + when the TlsSettings.sni field in the + BackendService is set. + + The well-known roots are a set of root CAs + managed by Google. CAs in this set can be added + or removed without notice. 
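+
+        A minimal sketch of how these values are typically combined with a
+        TrustConfig (illustrative only; it assumes the
+        ``BackendAuthenticationConfig`` type is importable from this
+        package's ``types`` module, and the resource path is a
+        placeholder)::
+
+            from google.cloud.network_security_v1alpha1 import types
+
+            config = types.BackendAuthenticationConfig(
+                # Validate backend certificates against the private roots in the
+                # TrustConfig and, in addition, Google-managed public roots.
+                trust_config="projects/my-project/locations/global/trustConfigs/my-trust-config",
+                well_known_roots=types.BackendAuthenticationConfig.WellKnownRoots.PUBLIC_ROOTS,
+            )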
+ """ + WELL_KNOWN_ROOTS_UNSPECIFIED = 0 + NONE = 1 + PUBLIC_ROOTS = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + client_certificate: str = proto.Field( + proto.STRING, + number=6, + ) + trust_config: str = proto.Field( + proto.STRING, + number=7, + ) + well_known_roots: WellKnownRoots = proto.Field( + proto.ENUM, + number=8, + enum=WellKnownRoots, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + + +class ListBackendAuthenticationConfigsRequest(proto.Message): + r"""Request used by the ListBackendAuthenticationConfigs method. + + Attributes: + parent (str): + Required. The project and location from which the + BackendAuthenticationConfigs should be listed, specified in + the format ``projects/*/locations/{location}``. + page_size (int): + Maximum number of + BackendAuthenticationConfigs to return per call. + page_token (str): + The value returned by the last + ``ListBackendAuthenticationConfigsResponse`` Indicates that + this is a continuation of a prior + ``ListBackendAuthenticationConfigs`` call, and that the + system should return the next page of data. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListBackendAuthenticationConfigsResponse(proto.Message): + r"""Response returned by the ListBackendAuthenticationConfigs + method. + + Attributes: + backend_authentication_configs (MutableSequence[google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig]): + List of BackendAuthenticationConfig + resources. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``next_page_token`` is included. To get the + next set of results, call this method again using the value + of ``next_page_token`` as ``page_token``. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backend_authentication_configs: MutableSequence[ + "BackendAuthenticationConfig" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackendAuthenticationConfig", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackendAuthenticationConfigRequest(proto.Message): + r"""Request used by the GetBackendAuthenticationConfig method. + + Attributes: + name (str): + Required. A name of the BackendAuthenticationConfig to get. + Must be in the format + ``projects/*/locations/{location}/backendAuthenticationConfigs/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateBackendAuthenticationConfigRequest(proto.Message): + r"""Request used by the CreateBackendAuthenticationConfig method. + + Attributes: + parent (str): + Required. The parent resource of the + BackendAuthenticationConfig. Must be in the format + ``projects/*/locations/{location}``. + backend_authentication_config_id (str): + Required. 
Short name of the + BackendAuthenticationConfig resource to be + created. This value should be 1-63 characters + long, containing only letters, numbers, hyphens, + and underscores, and should not start with a + number. E.g. "backend-auth-config". + backend_authentication_config (google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig): + Required. BackendAuthenticationConfig + resource to be created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backend_authentication_config_id: str = proto.Field( + proto.STRING, + number=2, + ) + backend_authentication_config: "BackendAuthenticationConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="BackendAuthenticationConfig", + ) + + +class UpdateBackendAuthenticationConfigRequest(proto.Message): + r"""Request used by UpdateBackendAuthenticationConfig method. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the BackendAuthenticationConfig resource by + the update. The fields specified in the update_mask are + relative to the resource, not the full request. A field will + be overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + backend_authentication_config (google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig): + Required. Updated BackendAuthenticationConfig + resource. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backend_authentication_config: "BackendAuthenticationConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="BackendAuthenticationConfig", + ) + + +class DeleteBackendAuthenticationConfigRequest(proto.Message): + r"""Request used by the DeleteBackendAuthenticationConfig method. + + Attributes: + name (str): + Required. A name of the BackendAuthenticationConfig to + delete. Must be in the format + ``projects/*/locations/{location}/backendAuthenticationConfigs/*``. + etag (str): + Optional. Etag of the resource. + If this is provided, it must match the server's + etag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/dns_threat_detector.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/dns_threat_detector.py new file mode 100644 index 000000000000..795f304608cd --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/dns_threat_detector.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "DnsThreatDetector", + "ListDnsThreatDetectorsRequest", + "ListDnsThreatDetectorsResponse", + "GetDnsThreatDetectorRequest", + "CreateDnsThreatDetectorRequest", + "UpdateDnsThreatDetectorRequest", + "DeleteDnsThreatDetectorRequest", + }, +) + + +class DnsThreatDetector(proto.Message): + r"""A DNS threat detector sends DNS query logs to a *provider* that then + analyzes the logs to identify threat events in the DNS queries. By + default, all VPC networks in your projects are included. You can + exclude specific networks by supplying ``excluded_networks``. + + Attributes: + name (str): + Immutable. Identifier. Name of the + DnsThreatDetector resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp. + labels (MutableMapping[str, str]): + Optional. Any labels associated with the + DnsThreatDetector, listed as key value pairs. + excluded_networks (MutableSequence[str]): + Optional. A list of network resource names which aren't + monitored by this DnsThreatDetector. + + Example: + ``projects/PROJECT_ID/global/networks/NETWORK_NAME``. + provider (google.cloud.network_security_v1alpha1.types.DnsThreatDetector.Provider): + Required. The provider used for DNS threat + analysis. + """ + + class Provider(proto.Enum): + r"""Name of the provider used for DNS threat analysis. + + Values: + PROVIDER_UNSPECIFIED (0): + An unspecified provider. + INFOBLOX (1): + The Infoblox DNS threat detector provider. + """ + PROVIDER_UNSPECIFIED = 0 + INFOBLOX = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + excluded_networks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + provider: Provider = proto.Field( + proto.ENUM, + number=6, + enum=Provider, + ) + + +class ListDnsThreatDetectorsRequest(proto.Message): + r"""The message for requesting a list of DnsThreatDetectors in + the project. + + Attributes: + parent (str): + Required. The parent value for + ``ListDnsThreatDetectorsRequest``. + page_size (int): + Optional. The requested page size. The server + may return fewer items than requested. If + unspecified, the server picks an appropriate + default. + page_token (str): + Optional. A page token received from a previous + ``ListDnsThreatDetectorsRequest`` call. Provide this to + retrieve the subsequent page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDnsThreatDetectorsResponse(proto.Message): + r"""The response message to requesting a list of + DnsThreatDetectors. 
+ + Attributes: + dns_threat_detectors (MutableSequence[google.cloud.network_security_v1alpha1.types.DnsThreatDetector]): + The list of DnsThreatDetector resources. + next_page_token (str): + A token, which can be sent as ``page_token``, to retrieve + the next page. + unreachable (MutableSequence[str]): + Unordered list. Unreachable ``DnsThreatDetector`` resources. + """ + + @property + def raw_page(self): + return self + + dns_threat_detectors: MutableSequence["DnsThreatDetector"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DnsThreatDetector", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetDnsThreatDetectorRequest(proto.Message): + r"""The message sent to get a DnsThreatDetector. + + Attributes: + name (str): + Required. Name of the DnsThreatDetector + resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDnsThreatDetectorRequest(proto.Message): + r"""The message to create a DnsThreatDetector. + + Attributes: + parent (str): + Required. The value for the parent of the + DnsThreatDetector resource. + dns_threat_detector_id (str): + Optional. The ID of the requesting + DnsThreatDetector object. If this field is not + supplied, the service generates an identifier. + dns_threat_detector (google.cloud.network_security_v1alpha1.types.DnsThreatDetector): + Required. The ``DnsThreatDetector`` resource to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + dns_threat_detector_id: str = proto.Field( + proto.STRING, + number=2, + ) + dns_threat_detector: "DnsThreatDetector" = proto.Field( + proto.MESSAGE, + number=3, + message="DnsThreatDetector", + ) + + +class UpdateDnsThreatDetectorRequest(proto.Message): + r"""The message for updating a DnsThreatDetector. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The field mask is used to specify the fields to be + overwritten in the DnsThreatDetector resource by the update. + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the mask is not provided then all + fields present in the request will be overwritten. + dns_threat_detector (google.cloud.network_security_v1alpha1.types.DnsThreatDetector): + Required. The DnsThreatDetector resource + being updated. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + dns_threat_detector: "DnsThreatDetector" = proto.Field( + proto.MESSAGE, + number=2, + message="DnsThreatDetector", + ) + + +class DeleteDnsThreatDetectorRequest(proto.Message): + r"""The message for deleting a DnsThreatDetector. + + Attributes: + name (str): + Required. Name of the DnsThreatDetector + resource. 
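+
+    A minimal usage sketch (illustrative only; it assumes this request is
+    sent through the generated DnsThreatDetectorService client, referred to
+    here as ``DnsThreatDetectorServiceClient``, and the resource name is a
+    placeholder)::
+
+        from google.cloud import network_security_v1alpha1
+
+        client = network_security_v1alpha1.DnsThreatDetectorServiceClient()
+        request = network_security_v1alpha1.DeleteDnsThreatDetectorRequest(
+            name="projects/my-project/locations/global/dnsThreatDetectors/my-detector",
+        )
+        # Issue the delete RPC (depending on the service surface this may
+        # return a long-running operation to wait on).
+        client.delete_dns_threat_detector(request=request)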
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/firewall_activation.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/firewall_activation.py new file mode 100644 index 000000000000..9e0c4557c7ae --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/firewall_activation.py @@ -0,0 +1,791 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "FirewallEndpoint", + "ListFirewallEndpointsRequest", + "ListFirewallEndpointsResponse", + "GetFirewallEndpointRequest", + "CreateFirewallEndpointRequest", + "UpdateFirewallEndpointRequest", + "DeleteFirewallEndpointRequest", + "FirewallEndpointAssociation", + "ListFirewallEndpointAssociationsRequest", + "ListFirewallEndpointAssociationsResponse", + "GetFirewallEndpointAssociationRequest", + "CreateFirewallEndpointAssociationRequest", + "DeleteFirewallEndpointAssociationRequest", + "UpdateFirewallEndpointAssociationRequest", + }, +) + + +class FirewallEndpoint(proto.Message): + r"""Message describing Endpoint object. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Immutable. Identifier. Name of resource. + description (str): + Optional. Description of the firewall + endpoint. Max length 2048 characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + labels (MutableMapping[str, str]): + Optional. Labels as key value pairs + state (google.cloud.network_security_v1alpha1.types.FirewallEndpoint.State): + Output only. Current state of the endpoint. + reconciling (bool): + Output only. Whether reconciling is in + progress, recommended per + https://google.aip.dev/128. + associated_networks (MutableSequence[str]): + Output only. List of networks that are + associated with this endpoint in the local zone. + This is a projection of the + FirewallEndpointAssociations pointing at this + endpoint. A network will only appear in this + list after traffic routing is fully configured. + Format: + + projects/{project}/global/networks/{name}. + associations (MutableSequence[google.cloud.network_security_v1alpha1.types.FirewallEndpoint.AssociationReference]): + Output only. List of + FirewallEndpointAssociations that are associated + to this endpoint. 
An association will only + appear in this list after traffic routing is + fully configured. + satisfies_pzs (bool): + Output only. [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzi``. + billing_project_id (str): + Required. Project to bill on endpoint uptime + usage. + endpoint_settings (google.cloud.network_security_v1alpha1.types.FirewallEndpoint.EndpointSettings): + Optional. Settings for the endpoint. + """ + + class State(proto.Enum): + r"""Endpoint state. + + Values: + STATE_UNSPECIFIED (0): + Not set. + CREATING (1): + Being created. + ACTIVE (2): + Processing configuration updates. + DELETING (3): + Being deleted. + INACTIVE (4): + Down or in an error state. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + INACTIVE = 4 + + class AssociationReference(proto.Message): + r"""This is a subset of the FirewallEndpointAssociation message, + containing fields to be used by the consumer. + + Attributes: + name (str): + Output only. The resource name of the + FirewallEndpointAssociation. Format: + + projects/{project}/locations/{location}/firewallEndpointAssociations/{id} + network (str): + Output only. The VPC network associated. + Format: + projects/{project}/global/networks/{name}. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + network: str = proto.Field( + proto.STRING, + number=2, + ) + + class EndpointSettings(proto.Message): + r"""Settings for the endpoint. + + Attributes: + jumbo_frames_enabled (bool): + Optional. Immutable. Indicates whether Jumbo + Frames are enabled. Default value is false. + """ + + jumbo_frames_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=9, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=6, + ) + associated_networks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + associations: MutableSequence[AssociationReference] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message=AssociationReference, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=14, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=15, + optional=True, + ) + billing_project_id: str = proto.Field( + proto.STRING, + number=8, + ) + endpoint_settings: EndpointSettings = proto.Field( + proto.MESSAGE, + number=19, + message=EndpointSettings, + ) + + +class ListFirewallEndpointsRequest(proto.Message): + r"""Message for requesting list of Endpoints + + Attributes: + parent (str): + Required. Parent value for + ListEndpointsRequest + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Optional. 
Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListFirewallEndpointsResponse(proto.Message): + r"""Message for response to listing Endpoints + + Attributes: + firewall_endpoints (MutableSequence[google.cloud.network_security_v1alpha1.types.FirewallEndpoint]): + The list of Endpoint + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + firewall_endpoints: MutableSequence["FirewallEndpoint"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="FirewallEndpoint", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetFirewallEndpointRequest(proto.Message): + r"""Message for getting a Endpoint + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateFirewallEndpointRequest(proto.Message): + r"""Message for creating a Endpoint + + Attributes: + parent (str): + Required. Value for parent. + firewall_endpoint_id (str): + Required. Id of the requesting object. If auto-generating Id + server-side, remove this field and firewall_endpoint_id from + the method_signature of Create RPC. + firewall_endpoint (google.cloud.network_security_v1alpha1.types.FirewallEndpoint): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + firewall_endpoint_id: str = proto.Field( + proto.STRING, + number=2, + ) + firewall_endpoint: "FirewallEndpoint" = proto.Field( + proto.MESSAGE, + number=3, + message="FirewallEndpoint", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateFirewallEndpointRequest(proto.Message): + r"""Message for updating a Endpoint + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Endpoint resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. 
+ firewall_endpoint (google.cloud.network_security_v1alpha1.types.FirewallEndpoint): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + firewall_endpoint: "FirewallEndpoint" = proto.Field( + proto.MESSAGE, + number=2, + message="FirewallEndpoint", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteFirewallEndpointRequest(proto.Message): + r"""Message for deleting a Endpoint + + Attributes: + name (str): + Required. Name of the resource + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class FirewallEndpointAssociation(proto.Message): + r"""Message describing Association object + + Attributes: + name (str): + Immutable. Identifier. name of resource + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + labels (MutableMapping[str, str]): + Optional. Labels as key value pairs + state (google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation.State): + Output only. Current state of the + association. + network (str): + Required. The URL of the network that is + being associated. + firewall_endpoint (str): + Required. The URL of the FirewallEndpoint + that is being associated. + tls_inspection_policy (str): + Optional. The URL of the TlsInspectionPolicy + that is being associated. + reconciling (bool): + Output only. Whether reconciling is in + progress, recommended per + https://google.aip.dev/128. + disabled (bool): + Optional. Whether the association is + disabled. True indicates that traffic won't be + intercepted + """ + + class State(proto.Enum): + r"""Association state. 
+ + Values: + STATE_UNSPECIFIED (0): + Not set. + CREATING (1): + Being created. + ACTIVE (2): + Active and ready for traffic. + DELETING (3): + Being deleted. + INACTIVE (4): + Down or in an error state. + ORPHAN (5): + The project that housed the association has + been deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + INACTIVE = 4 + ORPHAN = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + network: str = proto.Field( + proto.STRING, + number=6, + ) + firewall_endpoint: str = proto.Field( + proto.STRING, + number=7, + ) + tls_inspection_policy: str = proto.Field( + proto.STRING, + number=8, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=9, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=10, + ) + + +class ListFirewallEndpointAssociationsRequest(proto.Message): + r"""Message for requesting list of Associations + + Attributes: + parent (str): + Required. Parent value for + ListAssociationsRequest + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Optional. Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListFirewallEndpointAssociationsResponse(proto.Message): + r"""Message for response to listing Associations + + Attributes: + firewall_endpoint_associations (MutableSequence[google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation]): + The list of Association + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + firewall_endpoint_associations: MutableSequence[ + "FirewallEndpointAssociation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="FirewallEndpointAssociation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetFirewallEndpointAssociationRequest(proto.Message): + r"""Message for getting a Association + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateFirewallEndpointAssociationRequest(proto.Message): + r"""Message for creating a Association + + Attributes: + parent (str): + Required. Value for parent. + firewall_endpoint_association_id (str): + Optional. Id of the requesting object. 
If auto-generating Id + server-side, remove this field and + firewall_endpoint_association_id from the method_signature + of Create RPC. + firewall_endpoint_association (google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + firewall_endpoint_association_id: str = proto.Field( + proto.STRING, + number=2, + ) + firewall_endpoint_association: "FirewallEndpointAssociation" = proto.Field( + proto.MESSAGE, + number=3, + message="FirewallEndpointAssociation", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteFirewallEndpointAssociationRequest(proto.Message): + r"""Message for deleting a Association + + Attributes: + name (str): + Required. Name of the resource + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateFirewallEndpointAssociationRequest(proto.Message): + r"""Message for updating an Association + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Association resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + firewall_endpoint_association (google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. 
The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + firewall_endpoint_association: "FirewallEndpointAssociation" = proto.Field( + proto.MESSAGE, + number=2, + message="FirewallEndpointAssociation", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/gateway_security_policy.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/gateway_security_policy.py new file mode 100644 index 000000000000..66866bc693c2 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/gateway_security_policy.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "GatewaySecurityPolicy", + "CreateGatewaySecurityPolicyRequest", + "ListGatewaySecurityPoliciesRequest", + "ListGatewaySecurityPoliciesResponse", + "GetGatewaySecurityPolicyRequest", + "DeleteGatewaySecurityPolicyRequest", + "UpdateGatewaySecurityPolicyRequest", + }, +) + + +class GatewaySecurityPolicy(proto.Message): + r"""The GatewaySecurityPolicy resource contains a collection of + GatewaySecurityPolicyRules and associated metadata. + + Attributes: + name (str): + Required. Name of the resource. Name is of the form + projects/{project}/locations/{location}/gatewaySecurityPolicies/{gateway_security_policy} + gateway_security_policy should match the + pattern:(^\ `a-z <[a-z0-9-]{0,61}[a-z0-9]>`__?$). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was updated. + description (str): + Optional. Free-text description of the + resource. + tls_inspection_policy (str): + Optional. 
Name of a TLS Inspection Policy + resource that defines how TLS inspection will be + performed for any rule(s) which enables it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + tls_inspection_policy: str = proto.Field( + proto.STRING, + number=5, + ) + + +class CreateGatewaySecurityPolicyRequest(proto.Message): + r"""Request used by the CreateGatewaySecurityPolicy method. + + Attributes: + parent (str): + Required. The parent resource of the GatewaySecurityPolicy. + Must be in the format + ``projects/{project}/locations/{location}``. + gateway_security_policy_id (str): + Required. Short name of the GatewaySecurityPolicy resource + to be created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and underscores, + and should not start with a number. E.g. + "gateway_security_policy1". + gateway_security_policy (google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy): + Required. GatewaySecurityPolicy resource to + be created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + gateway_security_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + gateway_security_policy: "GatewaySecurityPolicy" = proto.Field( + proto.MESSAGE, + number=3, + message="GatewaySecurityPolicy", + ) + + +class ListGatewaySecurityPoliciesRequest(proto.Message): + r"""Request used with the ListGatewaySecurityPolicies method. + + Attributes: + parent (str): + Required. The project and location from which the + GatewaySecurityPolicies should be listed, specified in the + format ``projects/{project}/locations/{location}``. + page_size (int): + Maximum number of GatewaySecurityPolicies to + return per call. + page_token (str): + The value returned by the last + 'ListGatewaySecurityPoliciesResponse' Indicates + that this is a continuation of a prior + 'ListGatewaySecurityPolicies' call, and that the + system should return the next page of data. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListGatewaySecurityPoliciesResponse(proto.Message): + r"""Response returned by the ListGatewaySecurityPolicies method. + + Attributes: + gateway_security_policies (MutableSequence[google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy]): + List of GatewaySecurityPolicies resources. + next_page_token (str): + If there might be more results than those appearing in this + response, then 'next_page_token' is included. To get the + next set of results, call this method again using the value + of 'next_page_token' as 'page_token'. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
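+
+    A pagination sketch (illustrative only; it assumes these RPCs are
+    exposed on a generated client in this package, here assumed to be
+    ``NetworkSecurityClient`` with a ``list_gateway_security_policies``
+    pager method, and the parent path is a placeholder)::
+
+        from google.cloud import network_security_v1alpha1
+
+        # Assumed host service for the GatewaySecurityPolicy RPCs.
+        client = network_security_v1alpha1.NetworkSecurityClient()
+        request = network_security_v1alpha1.ListGatewaySecurityPoliciesRequest(
+            parent="projects/my-project/locations/us-central1",
+        )
+        pager = client.list_gateway_security_policies(request=request)
+        for page in pager.pages:
+            # Each page is one ListGatewaySecurityPoliciesResponse; the pager
+            # follows next_page_token between pages automatically.
+            for policy in page.gateway_security_policies:
+                print(policy.name)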
+ """ + + @property + def raw_page(self): + return self + + gateway_security_policies: MutableSequence[ + "GatewaySecurityPolicy" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="GatewaySecurityPolicy", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetGatewaySecurityPolicyRequest(proto.Message): + r"""Request used by the GetGatewaySecurityPolicy method. + + Attributes: + name (str): + Required. A name of the GatewaySecurityPolicy to get. Must + be in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteGatewaySecurityPolicyRequest(proto.Message): + r"""Request used by the DeleteGatewaySecurityPolicy method. + + Attributes: + name (str): + Required. A name of the GatewaySecurityPolicy to delete. + Must be in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateGatewaySecurityPolicyRequest(proto.Message): + r"""Request used by the UpdateGatewaySecurityPolicy method. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the GatewaySecurityPolicy resource by the + update. The fields specified in the update_mask are relative + to the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + gateway_security_policy (google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy): + Required. Updated GatewaySecurityPolicy + resource. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + gateway_security_policy: "GatewaySecurityPolicy" = proto.Field( + proto.MESSAGE, + number=2, + message="GatewaySecurityPolicy", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/gateway_security_policy_rule.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/gateway_security_policy_rule.py new file mode 100644 index 000000000000..7cdea1a2eb7c --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/gateway_security_policy_rule.py @@ -0,0 +1,308 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.networksecurity.v1alpha1",
+    manifest={
+        "GatewaySecurityPolicyRule",
+        "CreateGatewaySecurityPolicyRuleRequest",
+        "GetGatewaySecurityPolicyRuleRequest",
+        "UpdateGatewaySecurityPolicyRuleRequest",
+        "ListGatewaySecurityPolicyRulesRequest",
+        "ListGatewaySecurityPolicyRulesResponse",
+        "DeleteGatewaySecurityPolicyRuleRequest",
+    },
+)
+
+
+class GatewaySecurityPolicyRule(proto.Message):
+    r"""The GatewaySecurityPolicyRule resource is in a nested
+    collection within a GatewaySecurityPolicy and represents a
+    traffic matching condition and associated action to perform.
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        basic_profile (google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule.BasicProfile):
+            Required. Profile which tells what the
+            primitive action should be.
+
+            This field is a member of `oneof`_ ``profile``.
+        name (str):
+            Required. Immutable. Name of the resource. The name is the
+            full resource name, that is,
+            projects/{project}/locations/{location}/gatewaySecurityPolicies/{gateway_security_policy}/rules/{rule}.
+            The rule component should match the pattern
+            ``(^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$)``.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when the rule was created.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when the rule was updated.
+        enabled (bool):
+            Required. Whether the rule is enforced.
+        priority (int):
+            Required. Priority of the rule.
+            Lower number corresponds to higher precedence.
+        description (str):
+            Optional. Free-text description of the
+            resource.
+        session_matcher (str):
+            Required. CEL expression for matching on
+            session criteria.
+        application_matcher (str):
+            Optional. CEL expression for matching on
+            L7/application level criteria.
+        tls_inspection_enabled (bool):
+            Optional. Flag to enable TLS inspection of traffic matching
+            this rule. Can only be true if the parent
+            GatewaySecurityPolicy references a TLSInspectionConfig.
+    """
+
+    class BasicProfile(proto.Enum):
+        r"""Enum to define the primitive action.
+
+        Values:
+            BASIC_PROFILE_UNSPECIFIED (0):
+                No action is specified for the target.
+            ALLOW (1):
+                Allow the matched traffic.
+            DENY (2):
+                Deny the matched traffic.
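+
+        A minimal construction sketch (illustrative only; it assumes the
+        ``GatewaySecurityPolicyRule`` type is importable from this package's
+        ``types`` module, and the session matcher is a hypothetical CEL
+        expression)::
+
+            from google.cloud.network_security_v1alpha1 import types
+
+            # Allow sessions whose host matches www.example.com (hypothetical
+            # matcher expression).
+            rule = types.GatewaySecurityPolicyRule(
+                enabled=True,
+                priority=100,
+                session_matcher="host() == 'www.example.com'",
+                basic_profile=types.GatewaySecurityPolicyRule.BasicProfile.ALLOW,
+            )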
+ """ + BASIC_PROFILE_UNSPECIFIED = 0 + ALLOW = 1 + DENY = 2 + + basic_profile: BasicProfile = proto.Field( + proto.ENUM, + number=9, + oneof="profile", + enum=BasicProfile, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + enabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + priority: int = proto.Field( + proto.INT32, + number=5, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + session_matcher: str = proto.Field( + proto.STRING, + number=7, + ) + application_matcher: str = proto.Field( + proto.STRING, + number=8, + ) + tls_inspection_enabled: bool = proto.Field( + proto.BOOL, + number=10, + ) + + +class CreateGatewaySecurityPolicyRuleRequest(proto.Message): + r"""Methods for GatewaySecurityPolicy + RULES/GatewaySecurityPolicyRules. Request used by the + CreateGatewaySecurityPolicyRule method. + + Attributes: + parent (str): + Required. The parent where this rule will be created. Format + : + projects/{project}/location/{location}/gatewaySecurityPolicies/\* + gateway_security_policy_rule (google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule): + Required. The rule to be created. + gateway_security_policy_rule_id (str): + The ID to use for the rule, which will become the final + component of the rule's resource name. This value should be + 4-63 characters, and valid characters are /[a-z][0-9]-/. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + gateway_security_policy_rule: "GatewaySecurityPolicyRule" = proto.Field( + proto.MESSAGE, + number=2, + message="GatewaySecurityPolicyRule", + ) + gateway_security_policy_rule_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetGatewaySecurityPolicyRuleRequest(proto.Message): + r"""Request used by the GetGatewaySecurityPolicyRule method. + + Attributes: + name (str): + Required. The name of the GatewaySecurityPolicyRule to + retrieve. Format: + projects/{project}/location/{location}/gatewaySecurityPolicies/*/rules/* + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateGatewaySecurityPolicyRuleRequest(proto.Message): + r"""Request used by the UpdateGatewaySecurityPolicyRule method. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the GatewaySecurityPolicy resource by the + update. The fields specified in the update_mask are relative + to the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + gateway_security_policy_rule (google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule): + Required. Updated GatewaySecurityPolicyRule + resource. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + gateway_security_policy_rule: "GatewaySecurityPolicyRule" = proto.Field( + proto.MESSAGE, + number=2, + message="GatewaySecurityPolicyRule", + ) + + +class ListGatewaySecurityPolicyRulesRequest(proto.Message): + r"""Request used with the ListGatewaySecurityPolicyRules method. + + Attributes: + parent (str): + Required. 
The project, location and GatewaySecurityPolicy + from which the GatewaySecurityPolicyRules should be listed, + specified in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/{gatewaySecurityPolicy}``. + page_size (int): + Maximum number of GatewaySecurityPolicyRules + to return per call. + page_token (str): + The value returned by the last + 'ListGatewaySecurityPolicyRulesResponse' + Indicates that this is a continuation of a prior + 'ListGatewaySecurityPolicyRules' call, and that + the system should return the next page of data. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListGatewaySecurityPolicyRulesResponse(proto.Message): + r"""Response returned by the ListGatewaySecurityPolicyRules + method. + + Attributes: + gateway_security_policy_rules (MutableSequence[google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule]): + List of GatewaySecurityPolicyRule resources. + next_page_token (str): + If there might be more results than those appearing in this + response, then 'next_page_token' is included. To get the + next set of results, call this method again using the value + of 'next_page_token' as 'page_token'. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + gateway_security_policy_rules: MutableSequence[ + "GatewaySecurityPolicyRule" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="GatewaySecurityPolicyRule", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeleteGatewaySecurityPolicyRuleRequest(proto.Message): + r"""Request used by the DeleteGatewaySecurityPolicyRule method. + + Attributes: + name (str): + Required. A name of the GatewaySecurityPolicyRule to delete. + Must be in the format + ``projects/{project}/locations/{location}/gatewaySecurityPolicies/{gatewaySecurityPolicy}/rules/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/intercept.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/intercept.py new file mode 100644 index 000000000000..2bec4797ab94 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/intercept.py @@ -0,0 +1,1575 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "InterceptEndpointGroup", + "ListInterceptEndpointGroupsRequest", + "ListInterceptEndpointGroupsResponse", + "GetInterceptEndpointGroupRequest", + "CreateInterceptEndpointGroupRequest", + "UpdateInterceptEndpointGroupRequest", + "DeleteInterceptEndpointGroupRequest", + "InterceptEndpointGroupAssociation", + "ListInterceptEndpointGroupAssociationsRequest", + "ListInterceptEndpointGroupAssociationsResponse", + "GetInterceptEndpointGroupAssociationRequest", + "CreateInterceptEndpointGroupAssociationRequest", + "UpdateInterceptEndpointGroupAssociationRequest", + "DeleteInterceptEndpointGroupAssociationRequest", + "InterceptDeploymentGroup", + "ListInterceptDeploymentGroupsRequest", + "ListInterceptDeploymentGroupsResponse", + "GetInterceptDeploymentGroupRequest", + "CreateInterceptDeploymentGroupRequest", + "UpdateInterceptDeploymentGroupRequest", + "DeleteInterceptDeploymentGroupRequest", + "InterceptDeployment", + "ListInterceptDeploymentsRequest", + "ListInterceptDeploymentsResponse", + "GetInterceptDeploymentRequest", + "CreateInterceptDeploymentRequest", + "UpdateInterceptDeploymentRequest", + "DeleteInterceptDeploymentRequest", + "InterceptLocation", + }, +) + + +class InterceptEndpointGroup(proto.Message): + r"""An endpoint group is a consumer frontend for a deployment + group (backend). In order to configure intercept for a network, + consumers must create: + + - An association between their network and the endpoint group. + - A security profile that points to the endpoint group. + - A firewall rule that references the security profile (group). + + Attributes: + name (str): + Immutable. Identifier. The resource name of this endpoint + group, for example: + ``projects/123456789/locations/global/interceptEndpointGroups/my-eg``. + See https://google.aip.dev/122 for more details. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. See + https://google.aip.dev/148#timestamps. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was most recently updated. See + https://google.aip.dev/148#timestamps. + labels (MutableMapping[str, str]): + Optional. Labels are key/value pairs that + help to organize and filter resources. + intercept_deployment_group (str): + Required. Immutable. The deployment group that this endpoint + group is connected to, for example: + ``projects/123456789/locations/global/interceptDeploymentGroups/my-dg``. + See https://google.aip.dev/124. + connected_deployment_group (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup.ConnectedDeploymentGroup): + Output only. Details about the connected + deployment group to this endpoint group. + state (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup.State): + Output only. The current state of the + endpoint group. See https://google.aip.dev/216. + reconciling (bool): + Output only. The current state of the + resource does not match the user's intended + state, and the system is working to reconcile + them. This is part of the normal operation (e.g. + adding a new association to the group). See + https://google.aip.dev/128. 
+ associations (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup.AssociationDetails]): + Output only. List of associations to this + endpoint group. + description (str): + Optional. User-provided description of the + endpoint group. Used as additional context for + the endpoint group. + """ + + class State(proto.Enum): + r"""Endpoint group state. + + Values: + STATE_UNSPECIFIED (0): + State not set (this is not a valid state). + ACTIVE (1): + The endpoint group is ready and in sync with + the target deployment group. + CLOSED (2): + The deployment group backing this endpoint + group has been force-deleted. This endpoint + group cannot be used and interception is + effectively disabled. + CREATING (3): + The endpoint group is being created. + DELETING (4): + The endpoint group is being deleted. + OUT_OF_SYNC (5): + The endpoint group is out of sync with the + backing deployment group. In most cases, this is + a result of a transient issue within the system + (e.g. an inaccessible location) and the system + is expected to recover automatically. See the + associations field for details per network and + location. + DELETE_FAILED (6): + An attempt to delete the endpoint group has + failed. This is a terminal state and the + endpoint group is not expected to recover. The + only permitted operation is to retry deleting + the endpoint group. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CLOSED = 2 + CREATING = 3 + DELETING = 4 + OUT_OF_SYNC = 5 + DELETE_FAILED = 6 + + class ConnectedDeploymentGroup(proto.Message): + r"""The endpoint group's view of a connected deployment group. + + Attributes: + name (str): + Output only. The connected deployment group's resource name, + for example: + ``projects/123456789/locations/global/interceptDeploymentGroups/my-dg``. + See https://google.aip.dev/124. + locations (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptLocation]): + Output only. The list of locations where the + deployment group is present. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + locations: MutableSequence["InterceptLocation"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="InterceptLocation", + ) + + class AssociationDetails(proto.Message): + r"""The endpoint group's view of a connected association. + + Attributes: + name (str): + Output only. The connected association's resource name, for + example: + ``projects/123456789/locations/global/interceptEndpointGroupAssociations/my-ega``. + See https://google.aip.dev/124. + network (str): + Output only. The associated network, for + example: + projects/123456789/global/networks/my-network. + See https://google.aip.dev/124. + state (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation.State): + Output only. Most recent known state of the + association. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + network: str = proto.Field( + proto.STRING, + number=2, + ) + state: "InterceptEndpointGroupAssociation.State" = proto.Field( + proto.ENUM, + number=3, + enum="InterceptEndpointGroupAssociation.State", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + intercept_deployment_group: str = proto.Field( + proto.STRING, + number=5, + ) + connected_deployment_group: ConnectedDeploymentGroup = proto.Field( + proto.MESSAGE, + number=11, + message=ConnectedDeploymentGroup, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=8, + ) + associations: MutableSequence[AssociationDetails] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=AssociationDetails, + ) + description: str = proto.Field( + proto.STRING, + number=10, + ) + + +class ListInterceptEndpointGroupsRequest(proto.Message): + r"""Request message for ListInterceptEndpointGroups. + + Attributes: + parent (str): + Required. The parent, which owns this collection of endpoint + groups. Example: ``projects/123456789/locations/global``. + See https://google.aip.dev/132 for more details. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. See https://google.aip.dev/158 for more + details. + page_token (str): + Optional. A page token, received from a previous + ``ListInterceptEndpointGroups`` call. Provide this to + retrieve the subsequent page. When paginating, all other + parameters provided to ``ListInterceptEndpointGroups`` must + match the call that provided the page token. See + https://google.aip.dev/158 for more details. + filter (str): + Optional. Filter expression. + See https://google.aip.dev/160#filtering for + more details. + order_by (str): + Optional. Sort expression. + See https://google.aip.dev/132#ordering for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInterceptEndpointGroupsResponse(proto.Message): + r"""Response message for ListInterceptEndpointGroups. + + Attributes: + intercept_endpoint_groups (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup]): + The endpoint groups from the specified + parent. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. See https://google.aip.dev/158 for more details. 
+ """ + + @property + def raw_page(self): + return self + + intercept_endpoint_groups: MutableSequence[ + "InterceptEndpointGroup" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="InterceptEndpointGroup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetInterceptEndpointGroupRequest(proto.Message): + r"""Request message for GetInterceptEndpointGroup. + + Attributes: + name (str): + Required. The name of the endpoint group to retrieve. + Format: + projects/{project}/locations/{location}/interceptEndpointGroups/{intercept_endpoint_group} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInterceptEndpointGroupRequest(proto.Message): + r"""Request message for CreateInterceptEndpointGroup. + + Attributes: + parent (str): + Required. The parent resource where this + endpoint group will be created. Format: + projects/{project}/locations/{location} + intercept_endpoint_group_id (str): + Required. The ID to use for the endpoint + group, which will become the final component of + the endpoint group's resource name. + intercept_endpoint_group (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup): + Required. The endpoint group to create. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + intercept_endpoint_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + intercept_endpoint_group: "InterceptEndpointGroup" = proto.Field( + proto.MESSAGE, + number=3, + message="InterceptEndpointGroup", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInterceptEndpointGroupRequest(proto.Message): + r"""Request message for UpdateInterceptEndpointGroup. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are specified + relative to the endpoint group (e.g. ``description``; *not* + ``intercept_endpoint_group.description``). See + https://google.aip.dev/161 for more details. + intercept_endpoint_group (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup): + Required. The endpoint group to update. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + intercept_endpoint_group: "InterceptEndpointGroup" = proto.Field( + proto.MESSAGE, + number=2, + message="InterceptEndpointGroup", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteInterceptEndpointGroupRequest(proto.Message): + r"""Request message for DeleteInterceptEndpointGroup. + + Attributes: + name (str): + Required. The endpoint group to delete. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class InterceptEndpointGroupAssociation(proto.Message): + r"""An endpoint group association represents a link between a + network and an endpoint group in the organization. + + Creating an association creates the networking infrastructure + linking the network to the endpoint group, but does not enable + intercept by itself. To enable intercept, the user must also + create a network firewall policy containing intercept rules and + associate it with the network. + + Attributes: + name (str): + Immutable. Identifier. The resource name of this endpoint + group association, for example: + ``projects/123456789/locations/global/interceptEndpointGroupAssociations/my-eg-association``. + See https://google.aip.dev/122 for more details. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. See + https://google.aip.dev/148#timestamps. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was most recently updated. See + https://google.aip.dev/148#timestamps. + labels (MutableMapping[str, str]): + Optional. Labels are key/value pairs that + help to organize and filter resources. + intercept_endpoint_group (str): + Required. Immutable. The endpoint group that this + association is connected to, for example: + ``projects/123456789/locations/global/interceptEndpointGroups/my-eg``. + See https://google.aip.dev/124. + network (str): + Required. Immutable. The VPC network that is associated. for + example: ``projects/123456789/global/networks/my-network``. + See https://google.aip.dev/124. + locations_details (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation.LocationDetails]): + Output only. The list of locations where the + association is present. This information is + retrieved from the linked endpoint group, and + not configured as part of the association + itself. + state (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation.State): + Output only. Current state of the endpoint + group association. + reconciling (bool): + Output only. The current state of the + resource does not match the user's intended + state, and the system is working to reconcile + them. This part of the normal operation (e.g. + adding a new location to the target deployment + group). See https://google.aip.dev/128. + locations (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptLocation]): + Output only. The list of locations where the + association is configured. This information is + retrieved from the linked endpoint group. + """ + + class State(proto.Enum): + r"""The state of the association. + + Values: + STATE_UNSPECIFIED (0): + Not set. + ACTIVE (1): + The association is ready and in sync with the + linked endpoint group. + CREATING (2): + The association is being created. + DELETING (3): + The association is being deleted. + CLOSED (4): + The association is disabled due to a breaking + change in another resource. + OUT_OF_SYNC (5): + The association is out of sync with the linked endpoint + group. In most cases, this is a result of a transient issue + within the system (e.g. an inaccessible location) and the + system is expected to recover automatically. Check the + ``locations_details`` field for more details. + DELETE_FAILED (6): + An attempt to delete the association has + failed. 
This is a terminal state and the + association is not expected to be usable as some + of its resources have been deleted. + The only permitted operation is to retry + deleting the association. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + DELETING = 3 + CLOSED = 4 + OUT_OF_SYNC = 5 + DELETE_FAILED = 6 + + class LocationDetails(proto.Message): + r"""Contains details about the state of an association in a + specific cloud location. + + Attributes: + location (str): + Output only. The cloud location, e.g. + "us-central1-a" or "asia-south1". + state (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation.LocationDetails.State): + Output only. The current state of the + association in this location. + """ + + class State(proto.Enum): + r"""The state of association. + + Values: + STATE_UNSPECIFIED (0): + Not set. + ACTIVE (1): + The association is ready and in sync with the + linked endpoint group. + OUT_OF_SYNC (2): + The association is out of sync with the + linked endpoint group. In most cases, this is a + result of a transient issue within the system + (e.g. an inaccessible location) and the system + is expected to recover automatically. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + OUT_OF_SYNC = 2 + + location: str = proto.Field( + proto.STRING, + number=1, + ) + state: "InterceptEndpointGroupAssociation.LocationDetails.State" = proto.Field( + proto.ENUM, + number=2, + enum="InterceptEndpointGroupAssociation.LocationDetails.State", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + intercept_endpoint_group: str = proto.Field( + proto.STRING, + number=5, + ) + network: str = proto.Field( + proto.STRING, + number=6, + ) + locations_details: MutableSequence[LocationDetails] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=LocationDetails, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=9, + ) + locations: MutableSequence["InterceptLocation"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="InterceptLocation", + ) + + +class ListInterceptEndpointGroupAssociationsRequest(proto.Message): + r"""Request message for ListInterceptEndpointGroupAssociations. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + associations. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. See https://google.aip.dev/158 for more + details. + page_token (str): + Optional. A page token, received from a previous + ``ListInterceptEndpointGroups`` call. Provide this to + retrieve the subsequent page. When paginating, all other + parameters provided to ``ListInterceptEndpointGroups`` must + match the call that provided the page token. See + https://google.aip.dev/158 for more details. + filter (str): + Optional. Filter expression. + See https://google.aip.dev/160#filtering for + more details. + order_by (str): + Optional. Sort expression. 
+ See https://google.aip.dev/132#ordering for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInterceptEndpointGroupAssociationsResponse(proto.Message): + r"""Response message for ListInterceptEndpointGroupAssociations. + + Attributes: + intercept_endpoint_group_associations (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation]): + The associations from the specified parent. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. See https://google.aip.dev/158 for more details. + """ + + @property + def raw_page(self): + return self + + intercept_endpoint_group_associations: MutableSequence[ + "InterceptEndpointGroupAssociation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="InterceptEndpointGroupAssociation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetInterceptEndpointGroupAssociationRequest(proto.Message): + r"""Request message for GetInterceptEndpointGroupAssociation. + + Attributes: + name (str): + Required. The name of the association to retrieve. Format: + projects/{project}/locations/{location}/interceptEndpointGroupAssociations/{intercept_endpoint_group_association} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInterceptEndpointGroupAssociationRequest(proto.Message): + r"""Request message for CreateInterceptEndpointGroupAssociation. + + Attributes: + parent (str): + Required. The parent resource where this + association will be created. Format: + projects/{project}/locations/{location} + intercept_endpoint_group_association_id (str): + Optional. The ID to use for the new + association, which will become the final + component of the endpoint group's resource name. + If not provided, the server will generate a + unique ID. + intercept_endpoint_group_association (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation): + Required. The association to create. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + intercept_endpoint_group_association_id: str = proto.Field( + proto.STRING, + number=2, + ) + intercept_endpoint_group_association: "InterceptEndpointGroupAssociation" = ( + proto.Field( + proto.MESSAGE, + number=3, + message="InterceptEndpointGroupAssociation", + ) + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInterceptEndpointGroupAssociationRequest(proto.Message): + r"""Request message for UpdateInterceptEndpointGroupAssociation. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are specified + relative to the association (e.g. ``description``; *not* + ``intercept_endpoint_group_association.description``). See + https://google.aip.dev/161 for more details. 
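# A sketch of associating a VPC network with an endpoint group, per the
# InterceptEndpointGroupAssociation message above. Creating the association
# wires up the networking path only; enabling intercept still requires a
# network firewall policy with intercept rules. Client and method names are
# inferred, not confirmed here.
import uuid

from google.cloud import network_security_v1alpha1


def associate_network(project: str, endpoint_group: str, network: str):
    client = network_security_v1alpha1.InterceptClient()
    request = network_security_v1alpha1.CreateInterceptEndpointGroupAssociationRequest(
        parent=f"projects/{project}/locations/global",
        # The association ID is optional; the server generates one if omitted.
        intercept_endpoint_group_association=network_security_v1alpha1.InterceptEndpointGroupAssociation(
            intercept_endpoint_group=endpoint_group,
            network=network,  # e.g. projects/123456789/global/networks/my-network
        ),
        request_id=str(uuid.uuid4()),
    )
    # Expected to be a long-running operation; wait for the association.
    return client.create_intercept_endpoint_group_association(request=request).result()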
+ intercept_endpoint_group_association (google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation): + Required. The association to update. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + intercept_endpoint_group_association: "InterceptEndpointGroupAssociation" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="InterceptEndpointGroupAssociation", + ) + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteInterceptEndpointGroupAssociationRequest(proto.Message): + r"""Request message for DeleteInterceptEndpointGroupAssociation. + + Attributes: + name (str): + Required. The association to delete. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class InterceptDeploymentGroup(proto.Message): + r"""A deployment group aggregates many zonal intercept backends + (deployments) into a single global intercept service. Consumers + can connect this service using an endpoint group. + + Attributes: + name (str): + Immutable. Identifier. The resource name of this deployment + group, for example: + ``projects/123456789/locations/global/interceptDeploymentGroups/my-dg``. + See https://google.aip.dev/122 for more details. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. See + https://google.aip.dev/148#timestamps. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was most recently updated. See + https://google.aip.dev/148#timestamps. + labels (MutableMapping[str, str]): + Optional. Labels are key/value pairs that + help to organize and filter resources. + network (str): + Required. Immutable. The network that will be used for all + child deployments, for example: + ``projects/{project}/global/networks/{network}``. See + https://google.aip.dev/124. + connected_endpoint_groups (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup.ConnectedEndpointGroup]): + Output only. The list of endpoint groups that + are connected to this resource. + nested_deployments (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup.Deployment]): + Output only. The list of Intercept + Deployments that belong to this group. + state (google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup.State): + Output only. The current state of the + deployment group. See + https://google.aip.dev/216. + reconciling (bool): + Output only. The current state of the + resource does not match the user's intended + state, and the system is working to reconcile + them. This is part of the normal operation (e.g. + adding a new deployment to the group) See + https://google.aip.dev/128. + description (str): + Optional. User-provided description of the + deployment group. Used as additional context for + the deployment group. 
+ locations (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptLocation]): + Output only. The list of locations where the + deployment group is present. + """ + + class State(proto.Enum): + r"""The current state of the deployment group. + + Values: + STATE_UNSPECIFIED (0): + State not set (this is not a valid state). + ACTIVE (1): + The deployment group is ready. + CREATING (2): + The deployment group is being created. + DELETING (3): + The deployment group is being deleted. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + DELETING = 3 + + class ConnectedEndpointGroup(proto.Message): + r"""An endpoint group connected to this deployment group. + + Attributes: + name (str): + Output only. The connected endpoint group's resource name, + for example: + ``projects/123456789/locations/global/interceptEndpointGroups/my-eg``. + See https://google.aip.dev/124. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class Deployment(proto.Message): + r"""A deployment belonging to this deployment group. + + Attributes: + name (str): + Output only. The name of the Intercept Deployment, in the + format: + ``projects/{project}/locations/{location}/interceptDeployments/{intercept_deployment}``. + state (google.cloud.network_security_v1alpha1.types.InterceptDeployment.State): + Output only. Most recent known state of the + deployment. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: "InterceptDeployment.State" = proto.Field( + proto.ENUM, + number=2, + enum="InterceptDeployment.State", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + network: str = proto.Field( + proto.STRING, + number=5, + ) + connected_endpoint_groups: MutableSequence[ + ConnectedEndpointGroup + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=ConnectedEndpointGroup, + ) + nested_deployments: MutableSequence[Deployment] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=Deployment, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=8, + ) + description: str = proto.Field( + proto.STRING, + number=9, + ) + locations: MutableSequence["InterceptLocation"] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="InterceptLocation", + ) + + +class ListInterceptDeploymentGroupsRequest(proto.Message): + r"""Request message for ListInterceptDeploymentGroups. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + deployment groups. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. See https://google.aip.dev/158 for more + details. + page_token (str): + Optional. A page token, received from a previous + ``ListInterceptDeploymentGroups`` call. Provide this to + retrieve the subsequent page. When paginating, all other + parameters provided to ``ListInterceptDeploymentGroups`` + must match the call that provided the page token. 
See + https://google.aip.dev/158 for more details. + filter (str): + Optional. Filter expression. + See https://google.aip.dev/160#filtering for + more details. + order_by (str): + Optional. Sort expression. + See https://google.aip.dev/132#ordering for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInterceptDeploymentGroupsResponse(proto.Message): + r"""Response message for ListInterceptDeploymentGroups. + + Attributes: + intercept_deployment_groups (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup]): + The deployment groups from the specified + parent. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. See https://google.aip.dev/158 for more details. + """ + + @property + def raw_page(self): + return self + + intercept_deployment_groups: MutableSequence[ + "InterceptDeploymentGroup" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="InterceptDeploymentGroup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetInterceptDeploymentGroupRequest(proto.Message): + r"""Request message for GetInterceptDeploymentGroup. + + Attributes: + name (str): + Required. The name of the deployment group to retrieve. + Format: + projects/{project}/locations/{location}/interceptDeploymentGroups/{intercept_deployment_group} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInterceptDeploymentGroupRequest(proto.Message): + r"""Request message for CreateInterceptDeploymentGroup. + + Attributes: + parent (str): + Required. The parent resource where this + deployment group will be created. Format: + projects/{project}/locations/{location} + intercept_deployment_group_id (str): + Required. The ID to use for the new + deployment group, which will become the final + component of the deployment group's resource + name. + intercept_deployment_group (google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup): + Required. The deployment group to create. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + intercept_deployment_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + intercept_deployment_group: "InterceptDeploymentGroup" = proto.Field( + proto.MESSAGE, + number=3, + message="InterceptDeploymentGroup", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInterceptDeploymentGroupRequest(proto.Message): + r"""Request message for UpdateInterceptDeploymentGroup. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are specified + relative to the deployment group (e.g. ``description``; + *not* ``intercept_deployment_group.description``). See + https://google.aip.dev/161 for more details. + intercept_deployment_group (google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup): + Required. The deployment group to update. 
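# A sketch of the list pattern above, assuming an InterceptClient whose
# `list_intercept_deployment_groups` method returns the usual generated pager;
# the `raw_page` property on the response exists to support that pager
# machinery. The filter and order_by values are illustrative only.
from google.cloud import network_security_v1alpha1


def newest_deployment_groups(project: str, limit: int = 10) -> list:
    client = network_security_v1alpha1.InterceptClient()
    pager = client.list_intercept_deployment_groups(
        request=network_security_v1alpha1.ListInterceptDeploymentGroupsRequest(
            parent=f"projects/{project}/locations/global",
            filter='labels.env="prod"',   # AIP-160 filter expression (example)
            order_by="create_time desc",  # AIP-132 ordering (example)
            page_size=limit,
        )
    )
    # Iterating page-by-page keeps access to each raw response as well.
    groups = []
    for page in pager.pages:
        groups.extend(page.intercept_deployment_groups)
        if len(groups) >= limit:
            break
    return groups[:limit]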
+ request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + intercept_deployment_group: "InterceptDeploymentGroup" = proto.Field( + proto.MESSAGE, + number=2, + message="InterceptDeploymentGroup", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteInterceptDeploymentGroupRequest(proto.Message): + r"""Request message for DeleteInterceptDeploymentGroup. + + Attributes: + name (str): + Required. The deployment group to delete. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class InterceptDeployment(proto.Message): + r"""A deployment represents a zonal intercept backend ready to + accept GENEVE-encapsulated traffic, e.g. a zonal instance group + fronted by an internal passthrough load balancer. Deployments + are always part of a global deployment group which represents a + global intercept service. + + Attributes: + name (str): + Immutable. Identifier. The resource name of this deployment, + for example: + ``projects/123456789/locations/us-central1-a/interceptDeployments/my-dep``. + See https://google.aip.dev/122 for more details. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. See + https://google.aip.dev/148#timestamps. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was most recently updated. See + https://google.aip.dev/148#timestamps. + labels (MutableMapping[str, str]): + Optional. Labels are key/value pairs that + help to organize and filter resources. + forwarding_rule (str): + Required. Immutable. The regional forwarding rule that + fronts the interceptors, for example: + ``projects/123456789/regions/us-central1/forwardingRules/my-rule``. + See https://google.aip.dev/124. + intercept_deployment_group (str): + Required. Immutable. The deployment group that this + deployment is a part of, for example: + ``projects/123456789/locations/global/interceptDeploymentGroups/my-dg``. + See https://google.aip.dev/124. + state (google.cloud.network_security_v1alpha1.types.InterceptDeployment.State): + Output only. The current state of the + deployment. See https://google.aip.dev/216. + reconciling (bool): + Output only. The current state of the + resource does not match the user's intended + state, and the system is working to reconcile + them. This part of the normal operation (e.g. + linking a new association to the parent group). + See https://google.aip.dev/128. + description (str): + Optional. User-provided description of the + deployment. Used as additional context for the + deployment. + """ + + class State(proto.Enum): + r"""The current state of the deployment. + + Values: + STATE_UNSPECIFIED (0): + State not set (this is not a valid state). + ACTIVE (1): + The deployment is ready and in sync with the + parent group. + CREATING (2): + The deployment is being created. + DELETING (3): + The deployment is being deleted. 
+ OUT_OF_SYNC (4): + The deployment is out of sync with the parent + group. In most cases, this is a result of a + transient issue within the system (e.g. a + delayed data-path config) and the system is + expected to recover automatically. See the + parent deployment group's state for more + details. + DELETE_FAILED (5): + An attempt to delete the deployment has + failed. This is a terminal state and the + deployment is not expected to recover. The only + permitted operation is to retry deleting the + deployment. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + DELETING = 3 + OUT_OF_SYNC = 4 + DELETE_FAILED = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + forwarding_rule: str = proto.Field( + proto.STRING, + number=5, + ) + intercept_deployment_group: str = proto.Field( + proto.STRING, + number=6, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=8, + ) + description: str = proto.Field( + proto.STRING, + number=9, + ) + + +class ListInterceptDeploymentsRequest(proto.Message): + r"""Request message for ListInterceptDeployments. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + deployments. Example: + ``projects/123456789/locations/us-central1-a``. See + https://google.aip.dev/132 for more details. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. See https://google.aip.dev/158 for more + details. + page_token (str): + Optional. A page token, received from a previous + ``ListInterceptDeployments`` call. Provide this to retrieve + the subsequent page. When paginating, all other parameters + provided to ``ListInterceptDeployments`` must match the call + that provided the page token. See https://google.aip.dev/158 + for more details. + filter (str): + Optional. Filter expression. + See https://google.aip.dev/160#filtering for + more details. + order_by (str): + Optional. Sort expression. + See https://google.aip.dev/132#ordering for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInterceptDeploymentsResponse(proto.Message): + r"""Response message for ListInterceptDeployments. + + Attributes: + intercept_deployments (MutableSequence[google.cloud.network_security_v1alpha1.types.InterceptDeployment]): + The deployments from the specified parent. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. See https://google.aip.dev/158 for more details. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
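# A sketch of registering a zonal deployment, per the InterceptDeployment
# message above: it points at a regional forwarding rule and at the global
# deployment group it belongs to. Client and method names are inferred, not
# confirmed by this module.
import uuid

from google.cloud import network_security_v1alpha1


def create_deployment(project: str, zone: str, dep_id: str,
                      forwarding_rule: str, deployment_group: str):
    client = network_security_v1alpha1.InterceptClient()
    request = network_security_v1alpha1.CreateInterceptDeploymentRequest(
        parent=f"projects/{project}/locations/{zone}",
        intercept_deployment_id=dep_id,
        intercept_deployment=network_security_v1alpha1.InterceptDeployment(
            # e.g. projects/123456789/regions/us-central1/forwardingRules/my-rule
            forwarding_rule=forwarding_rule,
            # e.g. projects/123456789/locations/global/interceptDeploymentGroups/my-dg
            intercept_deployment_group=deployment_group,
        ),
        request_id=str(uuid.uuid4()),
    )
    # Expected to return a long-running operation.
    return client.create_intercept_deployment(request=request).result()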
+ """ + + @property + def raw_page(self): + return self + + intercept_deployments: MutableSequence["InterceptDeployment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="InterceptDeployment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetInterceptDeploymentRequest(proto.Message): + r"""Request message for GetInterceptDeployment. + + Attributes: + name (str): + Required. The name of the deployment to retrieve. Format: + projects/{project}/locations/{location}/interceptDeployments/{intercept_deployment} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInterceptDeploymentRequest(proto.Message): + r"""Request message for CreateInterceptDeployment. + + Attributes: + parent (str): + Required. The parent resource where this + deployment will be created. Format: + projects/{project}/locations/{location} + intercept_deployment_id (str): + Required. The ID to use for the new + deployment, which will become the final + component of the deployment's resource name. + intercept_deployment (google.cloud.network_security_v1alpha1.types.InterceptDeployment): + Required. The deployment to create. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + intercept_deployment_id: str = proto.Field( + proto.STRING, + number=2, + ) + intercept_deployment: "InterceptDeployment" = proto.Field( + proto.MESSAGE, + number=3, + message="InterceptDeployment", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInterceptDeploymentRequest(proto.Message): + r"""Request message for UpdateInterceptDeployment. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are specified + relative to the deployment (e.g. ``description``; *not* + ``intercept_deployment.description``). See + https://google.aip.dev/161 for more details. + intercept_deployment (google.cloud.network_security_v1alpha1.types.InterceptDeployment): + Required. The deployment to update. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + intercept_deployment: "InterceptDeployment" = proto.Field( + proto.MESSAGE, + number=2, + message="InterceptDeployment", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteInterceptDeploymentRequest(proto.Message): + r"""Request message for DeleteInterceptDeployment. + + Attributes: + name (str): + Required. Name of the resource + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class InterceptLocation(proto.Message): + r"""Details about intercept in a specific cloud location. + + Attributes: + location (str): + Output only. 
The cloud location, e.g. + "us-central1-a" or "asia-south1". + state (google.cloud.network_security_v1alpha1.types.InterceptLocation.State): + Output only. The current state of the + association in this location. + """ + + class State(proto.Enum): + r"""The current state of a resource in the location. + + Values: + STATE_UNSPECIFIED (0): + State not set (this is not a valid state). + ACTIVE (1): + The resource is ready and in sync in the + location. + OUT_OF_SYNC (2): + The resource is out of sync in the location. + In most cases, this is a result of a transient + issue within the system (e.g. an inaccessible + location) and the system is expected to recover + automatically. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + OUT_OF_SYNC = 2 + + location: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/mirroring.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/mirroring.py new file mode 100644 index 000000000000..93177b499953 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/mirroring.py @@ -0,0 +1,1603 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
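# A sketch of reading the per-location rollout state surfaced through
# InterceptLocation (defined at the end of the intercept module above):
# deployment groups report the locations they cover and whether each one is in
# sync. The get call is inferred, not confirmed here.
from google.cloud import network_security_v1alpha1

LocState = network_security_v1alpha1.InterceptLocation.State


def out_of_sync_locations(deployment_group_name: str) -> list:
    client = network_security_v1alpha1.InterceptClient()
    dg = client.get_intercept_deployment_group(
        request=network_security_v1alpha1.GetInterceptDeploymentGroupRequest(
            name=deployment_group_name
        )
    )
    # OUT_OF_SYNC is usually transient (e.g. an unreachable location) and the
    # system is expected to recover on its own.
    return [loc.location for loc in dg.locations if loc.state == LocState.OUT_OF_SYNC]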
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "MirroringEndpointGroup", + "ListMirroringEndpointGroupsRequest", + "ListMirroringEndpointGroupsResponse", + "GetMirroringEndpointGroupRequest", + "CreateMirroringEndpointGroupRequest", + "UpdateMirroringEndpointGroupRequest", + "DeleteMirroringEndpointGroupRequest", + "MirroringEndpointGroupAssociation", + "ListMirroringEndpointGroupAssociationsRequest", + "ListMirroringEndpointGroupAssociationsResponse", + "GetMirroringEndpointGroupAssociationRequest", + "CreateMirroringEndpointGroupAssociationRequest", + "UpdateMirroringEndpointGroupAssociationRequest", + "DeleteMirroringEndpointGroupAssociationRequest", + "MirroringDeploymentGroup", + "ListMirroringDeploymentGroupsRequest", + "ListMirroringDeploymentGroupsResponse", + "GetMirroringDeploymentGroupRequest", + "CreateMirroringDeploymentGroupRequest", + "UpdateMirroringDeploymentGroupRequest", + "DeleteMirroringDeploymentGroupRequest", + "MirroringDeployment", + "ListMirroringDeploymentsRequest", + "ListMirroringDeploymentsResponse", + "GetMirroringDeploymentRequest", + "CreateMirroringDeploymentRequest", + "UpdateMirroringDeploymentRequest", + "DeleteMirroringDeploymentRequest", + "MirroringLocation", + }, +) + + +class MirroringEndpointGroup(proto.Message): + r"""An endpoint group is a consumer frontend for a deployment + group (backend). In order to configure mirroring for a network, + consumers must create: + + - An association between their network and the endpoint group. + - A security profile that points to the endpoint group. + - A mirroring rule that references the security profile (group). + + Attributes: + name (str): + Immutable. Identifier. The resource name of this endpoint + group, for example: + ``projects/123456789/locations/global/mirroringEndpointGroups/my-eg``. + See https://google.aip.dev/122 for more details. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. See + https://google.aip.dev/148#timestamps. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was most recently updated. See + https://google.aip.dev/148#timestamps. + labels (MutableMapping[str, str]): + Optional. Labels are key/value pairs that + help to organize and filter resources. + mirroring_deployment_group (str): + Immutable. The deployment group that this DIRECT endpoint + group is connected to, for example: + ``projects/123456789/locations/global/mirroringDeploymentGroups/my-dg``. + See https://google.aip.dev/124. + connected_deployment_groups (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup.ConnectedDeploymentGroup]): + Output only. List of details about the + connected deployment groups to this endpoint + group. + state (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup.State): + Output only. The current state of the + endpoint group. See https://google.aip.dev/216. + reconciling (bool): + Output only. The current state of the + resource does not match the user's intended + state, and the system is working to reconcile + them. This is part of the normal operation (e.g. + adding a new association to the group). 
See + https://google.aip.dev/128. + type_ (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup.Type): + Immutable. The type of the endpoint group. + If left unspecified, defaults to DIRECT. + associations (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup.AssociationDetails]): + Output only. List of associations to this + endpoint group. + description (str): + Optional. User-provided description of the + endpoint group. Used as additional context for + the endpoint group. + """ + + class State(proto.Enum): + r"""The current state of the endpoint group. + + Values: + STATE_UNSPECIFIED (0): + State not set (this is not a valid state). + ACTIVE (1): + The endpoint group is ready and in sync with + the target deployment group. + CLOSED (2): + The deployment group backing this endpoint + group has been force-deleted. This endpoint + group cannot be used and mirroring is + effectively disabled. + CREATING (3): + The endpoint group is being created. + DELETING (4): + The endpoint group is being deleted. + OUT_OF_SYNC (5): + The endpoint group is out of sync with the + backing deployment group. In most cases, this is + a result of a transient issue within the system + (e.g. an inaccessible location) and the system + is expected to recover automatically. See the + associations field for details per network and + location. + DELETE_FAILED (6): + An attempt to delete the endpoint group has + failed. This is a terminal state and the + endpoint group is not expected to recover. The + only permitted operation is to retry deleting + the endpoint group. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CLOSED = 2 + CREATING = 3 + DELETING = 4 + OUT_OF_SYNC = 5 + DELETE_FAILED = 6 + + class Type(proto.Enum): + r"""The type of the endpoint group. + + Values: + TYPE_UNSPECIFIED (0): + Not set. + DIRECT (1): + An endpoint group that sends packets to a + single deployment group. + """ + TYPE_UNSPECIFIED = 0 + DIRECT = 1 + + class ConnectedDeploymentGroup(proto.Message): + r"""The endpoint group's view of a connected deployment group. + + Attributes: + name (str): + Output only. The connected deployment group's resource name, + for example: + ``projects/123456789/locations/global/mirroringDeploymentGroups/my-dg``. + See https://google.aip.dev/124. + locations (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringLocation]): + Output only. The list of locations where the + deployment group is present. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + locations: MutableSequence["MirroringLocation"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="MirroringLocation", + ) + + class AssociationDetails(proto.Message): + r"""The endpoint group's view of a connected association. + + Attributes: + name (str): + Output only. The connected association's resource name, for + example: + ``projects/123456789/locations/global/mirroringEndpointGroupAssociations/my-ega``. + See https://google.aip.dev/124. + network (str): + Output only. The associated network, for + example: + projects/123456789/global/networks/my-network. + See https://google.aip.dev/124. + state (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation.State): + Output only. Most recent known state of the + association. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + network: str = proto.Field( + proto.STRING, + number=2, + ) + state: "MirroringEndpointGroupAssociation.State" = proto.Field( + proto.ENUM, + number=3, + enum="MirroringEndpointGroupAssociation.State", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + mirroring_deployment_group: str = proto.Field( + proto.STRING, + number=5, + ) + connected_deployment_groups: MutableSequence[ + ConnectedDeploymentGroup + ] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message=ConnectedDeploymentGroup, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=8, + ) + type_: Type = proto.Field( + proto.ENUM, + number=11, + enum=Type, + ) + associations: MutableSequence[AssociationDetails] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=AssociationDetails, + ) + description: str = proto.Field( + proto.STRING, + number=10, + ) + + +class ListMirroringEndpointGroupsRequest(proto.Message): + r"""Request message for ListMirroringEndpointGroups. + + Attributes: + parent (str): + Required. The parent, which owns this collection of endpoint + groups. Example: ``projects/123456789/locations/global``. + See https://google.aip.dev/132 for more details. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. See https://google.aip.dev/158 for more + details. + page_token (str): + Optional. A page token, received from a previous + ``ListMirroringEndpointGroups`` call. Provide this to + retrieve the subsequent page. When paginating, all other + parameters provided to ``ListMirroringEndpointGroups`` must + match the call that provided the page token. See + https://google.aip.dev/158 for more details. + filter (str): + Optional. Filter expression. + See https://google.aip.dev/160#filtering for + more details. + order_by (str): + Optional. Sort expression. + See https://google.aip.dev/132#ordering for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMirroringEndpointGroupsResponse(proto.Message): + r"""Response message for ListMirroringEndpointGroups. + + Attributes: + mirroring_endpoint_groups (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup]): + The endpoint groups from the specified + parent. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. See https://google.aip.dev/158 for more details. 
+ """ + + @property + def raw_page(self): + return self + + mirroring_endpoint_groups: MutableSequence[ + "MirroringEndpointGroup" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="MirroringEndpointGroup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetMirroringEndpointGroupRequest(proto.Message): + r"""Request message for GetMirroringEndpointGroup. + + Attributes: + name (str): + Required. The name of the endpoint group to retrieve. + Format: + projects/{project}/locations/{location}/mirroringEndpointGroups/{mirroring_endpoint_group} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateMirroringEndpointGroupRequest(proto.Message): + r"""Request message for CreateMirroringEndpointGroup. + + Attributes: + parent (str): + Required. The parent resource where this + endpoint group will be created. Format: + projects/{project}/locations/{location} + mirroring_endpoint_group_id (str): + Required. The ID to use for the endpoint + group, which will become the final component of + the endpoint group's resource name. + mirroring_endpoint_group (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup): + Required. The endpoint group to create. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + mirroring_endpoint_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + mirroring_endpoint_group: "MirroringEndpointGroup" = proto.Field( + proto.MESSAGE, + number=3, + message="MirroringEndpointGroup", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateMirroringEndpointGroupRequest(proto.Message): + r"""Request message for UpdateMirroringEndpointGroup. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are specified + relative to the endpoint group (e.g. ``description``; *not* + ``mirroring_endpoint_group.description``). See + https://google.aip.dev/161 for more details. + mirroring_endpoint_group (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup): + Required. The endpoint group to update. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + mirroring_endpoint_group: "MirroringEndpointGroup" = proto.Field( + proto.MESSAGE, + number=2, + message="MirroringEndpointGroup", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteMirroringEndpointGroupRequest(proto.Message): + r"""Request message for DeleteMirroringEndpointGroup. + + Attributes: + name (str): + Required. The endpoint group to delete. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class MirroringEndpointGroupAssociation(proto.Message): + r"""An endpoint group association represents a link between a + network and an endpoint group in the organization. + + Creating an association creates the networking infrastructure + linking the network to the endpoint group, but does not enable + mirroring by itself. To enable mirroring, the user must also + create a network firewall policy containing mirroring rules and + associate it with the network. + + Attributes: + name (str): + Immutable. Identifier. The resource name of this endpoint + group association, for example: + ``projects/123456789/locations/global/mirroringEndpointGroupAssociations/my-eg-association``. + See https://google.aip.dev/122 for more details. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. See + https://google.aip.dev/148#timestamps. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was most recently updated. See + https://google.aip.dev/148#timestamps. + labels (MutableMapping[str, str]): + Optional. Labels are key/value pairs that + help to organize and filter resources. + mirroring_endpoint_group (str): + Immutable. The endpoint group that this association is + connected to, for example: + ``projects/123456789/locations/global/mirroringEndpointGroups/my-eg``. + See https://google.aip.dev/124. + network (str): + Immutable. The VPC network that is associated. for example: + ``projects/123456789/global/networks/my-network``. See + https://google.aip.dev/124. + locations_details (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation.LocationDetails]): + Output only. The list of locations where the + association is present. This information is + retrieved from the linked endpoint group, and + not configured as part of the association + itself. + state (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation.State): + Output only. Current state of the endpoint + group association. + reconciling (bool): + Output only. The current state of the + resource does not match the user's intended + state, and the system is working to reconcile + them. This part of the normal operation (e.g. + adding a new location to the target deployment + group). See https://google.aip.dev/128. + locations (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringLocation]): + Output only. The list of locations where the + association is configured. This information is + retrieved from the linked endpoint group. + """ + + class State(proto.Enum): + r"""The state of the association. + + Values: + STATE_UNSPECIFIED (0): + Not set. + ACTIVE (1): + The association is ready and in sync with the + linked endpoint group. + CREATING (3): + The association is being created. + DELETING (4): + The association is being deleted. + CLOSED (5): + The association is disabled due to a breaking + change in another resource. + OUT_OF_SYNC (6): + The association is out of sync with the linked endpoint + group. In most cases, this is a result of a transient issue + within the system (e.g. an inaccessible location) and the + system is expected to recover automatically. Check the + ``locations_details`` field for more details. + DELETE_FAILED (7): + An attempt to delete the association has + failed. 
This is a terminal state and the + association is not expected to be usable as some + of its resources have been deleted. + The only permitted operation is to retry + deleting the association. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 3 + DELETING = 4 + CLOSED = 5 + OUT_OF_SYNC = 6 + DELETE_FAILED = 7 + + class LocationDetails(proto.Message): + r"""Contains details about the state of an association in a + specific cloud location. + + Attributes: + location (str): + Output only. The cloud location, e.g. + "us-central1-a" or "asia-south1". + state (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation.LocationDetails.State): + Output only. The current state of the + association in this location. + """ + + class State(proto.Enum): + r"""The state of association. + + Values: + STATE_UNSPECIFIED (0): + Not set. + ACTIVE (1): + The association is ready and in sync with the + linked endpoint group. + OUT_OF_SYNC (2): + The association is out of sync with the + linked endpoint group. In most cases, this is a + result of a transient issue within the system + (e.g. an inaccessible location) and the system + is expected to recover automatically. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + OUT_OF_SYNC = 2 + + location: str = proto.Field( + proto.STRING, + number=1, + ) + state: "MirroringEndpointGroupAssociation.LocationDetails.State" = proto.Field( + proto.ENUM, + number=2, + enum="MirroringEndpointGroupAssociation.LocationDetails.State", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + mirroring_endpoint_group: str = proto.Field( + proto.STRING, + number=5, + ) + network: str = proto.Field( + proto.STRING, + number=6, + ) + locations_details: MutableSequence[LocationDetails] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=LocationDetails, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=9, + ) + locations: MutableSequence["MirroringLocation"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="MirroringLocation", + ) + + +class ListMirroringEndpointGroupAssociationsRequest(proto.Message): + r"""Request message for ListMirroringEndpointGroupAssociations. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + associations. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. See https://google.aip.dev/158 for more + details. + page_token (str): + Optional. A page token, received from a previous + ``ListMirroringEndpointGroups`` call. Provide this to + retrieve the subsequent page. When paginating, all other + parameters provided to ``ListMirroringEndpointGroups`` must + match the call that provided the page token. See + https://google.aip.dev/158 for more details. + filter (str): + Optional. Filter expression. + See https://google.aip.dev/160#filtering for + more details. + order_by (str): + Optional. Sort expression. 
+ See https://google.aip.dev/132#ordering for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMirroringEndpointGroupAssociationsResponse(proto.Message): + r"""Response message for ListMirroringEndpointGroupAssociations. + + Attributes: + mirroring_endpoint_group_associations (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation]): + The associations from the specified parent. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. See https://google.aip.dev/158 for more details. + """ + + @property + def raw_page(self): + return self + + mirroring_endpoint_group_associations: MutableSequence[ + "MirroringEndpointGroupAssociation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="MirroringEndpointGroupAssociation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetMirroringEndpointGroupAssociationRequest(proto.Message): + r"""Request message for GetMirroringEndpointGroupAssociation. + + Attributes: + name (str): + Required. The name of the association to retrieve. Format: + projects/{project}/locations/{location}/mirroringEndpointGroupAssociations/{mirroring_endpoint_group_association} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateMirroringEndpointGroupAssociationRequest(proto.Message): + r"""Request message for CreateMirroringEndpointGroupAssociation. + + Attributes: + parent (str): + Required. The parent resource where this + association will be created. Format: + projects/{project}/locations/{location} + mirroring_endpoint_group_association_id (str): + Optional. The ID to use for the new + association, which will become the final + component of the endpoint group's resource name. + If not provided, the server will generate a + unique ID. + mirroring_endpoint_group_association (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation): + Required. The association to create. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + mirroring_endpoint_group_association_id: str = proto.Field( + proto.STRING, + number=2, + ) + mirroring_endpoint_group_association: "MirroringEndpointGroupAssociation" = ( + proto.Field( + proto.MESSAGE, + number=3, + message="MirroringEndpointGroupAssociation", + ) + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateMirroringEndpointGroupAssociationRequest(proto.Message): + r"""Request message for UpdateMirroringEndpointGroupAssociation. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are specified + relative to the association (e.g. ``description``; *not* + ``mirroring_endpoint_group_association.description``). See + https://google.aip.dev/161 for more details. 
+        mirroring_endpoint_group_association (google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation):
+            Required. The association to update.
+        request_id (str):
+            Optional. A unique identifier for this request. Must be a
+            UUID4. This request is only idempotent if a ``request_id``
+            is provided. See https://google.aip.dev/155 for more
+            details.
+    """
+
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=field_mask_pb2.FieldMask,
+    )
+    mirroring_endpoint_group_association: "MirroringEndpointGroupAssociation" = (
+        proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message="MirroringEndpointGroupAssociation",
+        )
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class DeleteMirroringEndpointGroupAssociationRequest(proto.Message):
+    r"""Request message for DeleteMirroringEndpointGroupAssociation.
+
+    Attributes:
+        name (str):
+            Required. The association to delete.
+        request_id (str):
+            Optional. A unique identifier for this request. Must be a
+            UUID4. This request is only idempotent if a ``request_id``
+            is provided. See https://google.aip.dev/155 for more
+            details.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class MirroringDeploymentGroup(proto.Message):
+    r"""A deployment group aggregates many zonal mirroring backends
+    (deployments) into a single global mirroring service. Consumers
+    can connect to this service using an endpoint group.
+
+    Attributes:
+        name (str):
+            Immutable. Identifier. The resource name of this deployment
+            group, for example:
+            ``projects/123456789/locations/global/mirroringDeploymentGroups/my-dg``.
+            See https://google.aip.dev/122 for more details.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp when the resource
+            was created. See
+            https://google.aip.dev/148#timestamps.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp when the resource
+            was most recently updated. See
+            https://google.aip.dev/148#timestamps.
+        labels (MutableMapping[str, str]):
+            Optional. Labels are key/value pairs that
+            help to organize and filter resources.
+        network (str):
+            Required. Immutable. The network that will be used for all
+            child deployments, for example:
+            ``projects/{project}/global/networks/{network}``. See
+            https://google.aip.dev/124.
+        connected_endpoint_groups (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup.ConnectedEndpointGroup]):
+            Output only. The list of endpoint groups that
+            are connected to this resource.
+        nested_deployments (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup.Deployment]):
+            Output only. The list of Mirroring
+            Deployments that belong to this group.
+        state (google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup.State):
+            Output only. The current state of the
+            deployment group. See
+            https://google.aip.dev/216.
+        reconciling (bool):
+            Output only. The current state of the
+            resource does not match the user's intended
+            state, and the system is working to reconcile
+            them. This is part of the normal operation (e.g.
+            adding a new deployment to the group). See
+            https://google.aip.dev/128.
+        description (str):
+            Optional. User-provided description of the
+            deployment group. Used as additional context for
+            the deployment group.
+ locations (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringLocation]): + Output only. The list of locations where the + deployment group is present. + """ + + class State(proto.Enum): + r"""The current state of the deployment group. + + Values: + STATE_UNSPECIFIED (0): + State not set (this is not a valid state). + ACTIVE (1): + The deployment group is ready. + CREATING (2): + The deployment group is being created. + DELETING (3): + The deployment group is being deleted. + CLOSED (4): + The deployment group is being wiped out + (project deleted). + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + DELETING = 3 + CLOSED = 4 + + class ConnectedEndpointGroup(proto.Message): + r"""An endpoint group connected to this deployment group. + + Attributes: + name (str): + Output only. The connected endpoint group's resource name, + for example: + ``projects/123456789/locations/global/mirroringEndpointGroups/my-eg``. + See https://google.aip.dev/124. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class Deployment(proto.Message): + r"""A deployment belonging to this deployment group. + + Attributes: + name (str): + Output only. The name of the Mirroring Deployment, in the + format: + ``projects/{project}/locations/{location}/mirroringDeployments/{mirroring_deployment}``. + state (google.cloud.network_security_v1alpha1.types.MirroringDeployment.State): + Output only. Most recent known state of the + deployment. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: "MirroringDeployment.State" = proto.Field( + proto.ENUM, + number=2, + enum="MirroringDeployment.State", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + network: str = proto.Field( + proto.STRING, + number=5, + ) + connected_endpoint_groups: MutableSequence[ + ConnectedEndpointGroup + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=ConnectedEndpointGroup, + ) + nested_deployments: MutableSequence[Deployment] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=Deployment, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=8, + ) + description: str = proto.Field( + proto.STRING, + number=9, + ) + locations: MutableSequence["MirroringLocation"] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="MirroringLocation", + ) + + +class ListMirroringDeploymentGroupsRequest(proto.Message): + r"""Request message for ListMirroringDeploymentGroups. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + deployment groups. Example: + ``projects/123456789/locations/global``. See + https://google.aip.dev/132 for more details. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. See https://google.aip.dev/158 for more + details. + page_token (str): + Optional. A page token, received from a previous + ``ListMirroringDeploymentGroups`` call. Provide this to + retrieve the subsequent page. 
When paginating, all other + parameters provided to ``ListMirroringDeploymentGroups`` + must match the call that provided the page token. See + https://google.aip.dev/158 for more details. + filter (str): + Optional. Filter expression. + See https://google.aip.dev/160#filtering for + more details. + order_by (str): + Optional. Sort expression. + See https://google.aip.dev/132#ordering for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMirroringDeploymentGroupsResponse(proto.Message): + r"""Response message for ListMirroringDeploymentGroups. + + Attributes: + mirroring_deployment_groups (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup]): + The deployment groups from the specified + parent. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. See https://google.aip.dev/158 for more details. + """ + + @property + def raw_page(self): + return self + + mirroring_deployment_groups: MutableSequence[ + "MirroringDeploymentGroup" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="MirroringDeploymentGroup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetMirroringDeploymentGroupRequest(proto.Message): + r"""Request message for GetMirroringDeploymentGroup. + + Attributes: + name (str): + Required. The name of the deployment group to retrieve. + Format: + projects/{project}/locations/{location}/mirroringDeploymentGroups/{mirroring_deployment_group} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateMirroringDeploymentGroupRequest(proto.Message): + r"""Request message for CreateMirroringDeploymentGroup. + + Attributes: + parent (str): + Required. The parent resource where this + deployment group will be created. Format: + projects/{project}/locations/{location} + mirroring_deployment_group_id (str): + Required. The ID to use for the new + deployment group, which will become the final + component of the deployment group's resource + name. + mirroring_deployment_group (google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup): + Required. The deployment group to create. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + mirroring_deployment_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + mirroring_deployment_group: "MirroringDeploymentGroup" = proto.Field( + proto.MESSAGE, + number=3, + message="MirroringDeploymentGroup", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateMirroringDeploymentGroupRequest(proto.Message): + r"""Request message for UpdateMirroringDeploymentGroup. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are specified + relative to the deployment group (e.g. ``description``; + *not* ``mirroring_deployment_group.description``). See + https://google.aip.dev/161 for more details. 
+        mirroring_deployment_group (google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup):
+            Required. The deployment group to update.
+        request_id (str):
+            Optional. A unique identifier for this request. Must be a
+            UUID4. This request is only idempotent if a ``request_id``
+            is provided. See https://google.aip.dev/155 for more
+            details.
+    """
+
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=field_mask_pb2.FieldMask,
+    )
+    mirroring_deployment_group: "MirroringDeploymentGroup" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="MirroringDeploymentGroup",
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class DeleteMirroringDeploymentGroupRequest(proto.Message):
+    r"""Request message for DeleteMirroringDeploymentGroup.
+
+    Attributes:
+        name (str):
+            Required. The deployment group to delete.
+        request_id (str):
+            Optional. A unique identifier for this request. Must be a
+            UUID4. This request is only idempotent if a ``request_id``
+            is provided. See https://google.aip.dev/155 for more
+            details.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class MirroringDeployment(proto.Message):
+    r"""A deployment represents a zonal mirroring backend ready to
+    accept GENEVE-encapsulated replica traffic, e.g. a zonal
+    instance group fronted by an internal passthrough load balancer.
+    Deployments are always part of a global deployment group which
+    represents a global mirroring service.
+
+    Attributes:
+        name (str):
+            Immutable. Identifier. The resource name of this deployment,
+            for example:
+            ``projects/123456789/locations/us-central1-a/mirroringDeployments/my-dep``.
+            See https://google.aip.dev/122 for more details.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp when the resource
+            was created. See
+            https://google.aip.dev/148#timestamps.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp when the resource
+            was most recently updated. See
+            https://google.aip.dev/148#timestamps.
+        labels (MutableMapping[str, str]):
+            Optional. Labels are key/value pairs that
+            help to organize and filter resources.
+        forwarding_rule (str):
+            Required. Immutable. The regional forwarding rule that
+            fronts the mirroring collectors, for example:
+            ``projects/123456789/regions/us-central1/forwardingRules/my-rule``.
+            See https://google.aip.dev/124.
+        mirroring_deployment_group (str):
+            Required. Immutable. The deployment group that this
+            deployment is a part of, for example:
+            ``projects/123456789/locations/global/mirroringDeploymentGroups/my-dg``.
+            See https://google.aip.dev/124.
+        state (google.cloud.network_security_v1alpha1.types.MirroringDeployment.State):
+            Output only. The current state of the
+            deployment. See https://google.aip.dev/216.
+        reconciling (bool):
+            Output only. The current state of the
+            resource does not match the user's intended
+            state, and the system is working to reconcile
+            them. This is part of the normal operation (e.g.
+            linking a new association to the parent group).
+            See https://google.aip.dev/128.
+        description (str):
+            Optional. User-provided description of the
+            deployment. Used as additional context for the
+            deployment.
+    """
+
+    class State(proto.Enum):
+        r"""The current state of the deployment.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                State not set (this is not a valid state).
+ ACTIVE (1): + The deployment is ready and in sync with the + parent group. + CREATING (2): + The deployment is being created. + DELETING (3): + The deployment is being deleted. + OUT_OF_SYNC (4): + The deployment is out of sync with the parent + group. In most cases, this is a result of a + transient issue within the system (e.g. a + delayed data-path config) and the system is + expected to recover automatically. See the + parent deployment group's state for more + details. + DELETE_FAILED (5): + An attempt to delete the deployment has + failed. This is a terminal state and the + deployment is not expected to recover. The only + permitted operation is to retry deleting the + deployment. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CREATING = 2 + DELETING = 3 + OUT_OF_SYNC = 4 + DELETE_FAILED = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + forwarding_rule: str = proto.Field( + proto.STRING, + number=5, + ) + mirroring_deployment_group: str = proto.Field( + proto.STRING, + number=6, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=8, + ) + description: str = proto.Field( + proto.STRING, + number=9, + ) + + +class ListMirroringDeploymentsRequest(proto.Message): + r"""Request message for ListMirroringDeployments. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + deployments. Example: + ``projects/123456789/locations/us-central1-a``. See + https://google.aip.dev/132 for more details. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. See https://google.aip.dev/158 for more + details. + page_token (str): + Optional. A page token, received from a previous + ``ListMirroringDeployments`` call. Provide this to retrieve + the subsequent page. When paginating, all other parameters + provided to ``ListMirroringDeployments`` must match the call + that provided the page token. See https://google.aip.dev/158 + for more details. + filter (str): + Optional. Filter expression. + See https://google.aip.dev/160#filtering for + more details. + order_by (str): + Optional. Sort expression. + See https://google.aip.dev/132#ordering for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMirroringDeploymentsResponse(proto.Message): + r"""Response message for ListMirroringDeployments. + + Attributes: + mirroring_deployments (MutableSequence[google.cloud.network_security_v1alpha1.types.MirroringDeployment]): + The deployments from the specified parent. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. See https://google.aip.dev/158 for more details. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + mirroring_deployments: MutableSequence["MirroringDeployment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="MirroringDeployment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetMirroringDeploymentRequest(proto.Message): + r"""Request message for GetMirroringDeployment. + + Attributes: + name (str): + Required. The name of the deployment to retrieve. Format: + projects/{project}/locations/{location}/mirroringDeployments/{mirroring_deployment} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateMirroringDeploymentRequest(proto.Message): + r"""Request message for CreateMirroringDeployment. + + Attributes: + parent (str): + Required. The parent resource where this + deployment will be created. Format: + projects/{project}/locations/{location} + mirroring_deployment_id (str): + Required. The ID to use for the new + deployment, which will become the final + component of the deployment's resource name. + mirroring_deployment (google.cloud.network_security_v1alpha1.types.MirroringDeployment): + Required. The deployment to create. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + mirroring_deployment_id: str = proto.Field( + proto.STRING, + number=2, + ) + mirroring_deployment: "MirroringDeployment" = proto.Field( + proto.MESSAGE, + number=3, + message="MirroringDeployment", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateMirroringDeploymentRequest(proto.Message): + r"""Request message for UpdateMirroringDeployment. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Fields are specified + relative to the deployment (e.g. ``description``; *not* + ``mirroring_deployment.description``). See + https://google.aip.dev/161 for more details. + mirroring_deployment (google.cloud.network_security_v1alpha1.types.MirroringDeployment): + Required. The deployment to update. + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + mirroring_deployment: "MirroringDeployment" = proto.Field( + proto.MESSAGE, + number=2, + message="MirroringDeployment", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteMirroringDeploymentRequest(proto.Message): + r"""Request message for DeleteMirroringDeployment. + + Attributes: + name (str): + Required. Name of the resource + request_id (str): + Optional. A unique identifier for this request. Must be a + UUID4. This request is only idempotent if a ``request_id`` + is provided. See https://google.aip.dev/155 for more + details. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class MirroringLocation(proto.Message): + r"""Details about mirroring in a specific cloud location. + + Attributes: + location (str): + Output only. 
The cloud location, e.g. + "us-central1-a" or "asia-south1". + state (google.cloud.network_security_v1alpha1.types.MirroringLocation.State): + Output only. The current state of the + association in this location. + """ + + class State(proto.Enum): + r"""The current state of a resource in the location. + + Values: + STATE_UNSPECIFIED (0): + State not set (this is not a valid state). + ACTIVE (1): + The resource is ready and in sync in the + location. + OUT_OF_SYNC (2): + The resource is out of sync in the location. + In most cases, this is a result of a transient + issue within the system (e.g. an inaccessible + location) and the system is expected to recover + automatically. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + OUT_OF_SYNC = 2 + + location: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group.py new file mode 100644 index 000000000000..768054cd7f7a --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group.py @@ -0,0 +1,267 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group_intercept, + security_profile_group_mirroring, + security_profile_group_threatprevention, + security_profile_group_urlfiltering, +) + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "SecurityProfileGroup", + "SecurityProfile", + }, +) + + +class SecurityProfileGroup(proto.Message): + r"""SecurityProfileGroup is a resource that defines the behavior + for various ProfileTypes. + + Attributes: + name (str): + Immutable. Identifier. Name of the SecurityProfileGroup + resource. It matches pattern + ``projects|organizations/*/locations/{location}/securityProfileGroups/{security_profile_group}``. + description (str): + Optional. An optional description of the + profile group. Max length 2048 characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Resource creation timestamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last resource update timestamp. + etag (str): + Output only. This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. + data_path_id (int): + Output only. Identifier used by the + data-path. Unique within {container, location}. 
+ labels (MutableMapping[str, str]): + Optional. Labels as key value pairs. + threat_prevention_profile (str): + Optional. Reference to a SecurityProfile with + the ThreatPrevention configuration. + custom_mirroring_profile (str): + Optional. Reference to a SecurityProfile with + the CustomMirroring configuration. + custom_intercept_profile (str): + Optional. Reference to a SecurityProfile with + the CustomIntercept configuration. + url_filtering_profile (str): + Optional. Reference to a SecurityProfile with + the UrlFiltering configuration. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=5, + ) + data_path_id: int = proto.Field( + proto.UINT64, + number=12, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + threat_prevention_profile: str = proto.Field( + proto.STRING, + number=6, + ) + custom_mirroring_profile: str = proto.Field( + proto.STRING, + number=8, + ) + custom_intercept_profile: str = proto.Field( + proto.STRING, + number=9, + ) + url_filtering_profile: str = proto.Field( + proto.STRING, + number=11, + ) + + +class SecurityProfile(proto.Message): + r"""SecurityProfile is a resource that defines the behavior for + one of many ProfileTypes. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + threat_prevention_profile (google.cloud.network_security_v1alpha1.types.ThreatPreventionProfile): + The threat prevention configuration for the + SecurityProfile. + + This field is a member of `oneof`_ ``profile``. + custom_mirroring_profile (google.cloud.network_security_v1alpha1.types.CustomMirroringProfile): + The custom Packet Mirroring v2 configuration + for the SecurityProfile. + + This field is a member of `oneof`_ ``profile``. + custom_intercept_profile (google.cloud.network_security_v1alpha1.types.CustomInterceptProfile): + The custom TPPI configuration for the + SecurityProfile. + + This field is a member of `oneof`_ ``profile``. + url_filtering_profile (google.cloud.network_security_v1alpha1.types.UrlFilteringProfile): + The URL filtering configuration for the + SecurityProfile. + + This field is a member of `oneof`_ ``profile``. + name (str): + Immutable. Identifier. Name of the SecurityProfile resource. + It matches pattern + ``projects|organizations/*/locations/{location}/securityProfiles/{security_profile}``. + description (str): + Optional. An optional description of the + profile. Max length 512 characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Resource creation timestamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last resource update timestamp. + etag (str): + Output only. This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. 
+ labels (MutableMapping[str, str]): + Optional. Labels as key value pairs. + type_ (google.cloud.network_security_v1alpha1.types.SecurityProfile.ProfileType): + Immutable. The single ProfileType that the + SecurityProfile resource configures. + """ + + class ProfileType(proto.Enum): + r"""The possible types that the SecurityProfile resource can + configure. + + Values: + PROFILE_TYPE_UNSPECIFIED (0): + Profile type not specified. + THREAT_PREVENTION (1): + Profile type for threat prevention. + CUSTOM_MIRRORING (2): + Profile type for packet mirroring v2 + CUSTOM_INTERCEPT (3): + Profile type for TPPI. + URL_FILTERING (5): + Profile type for URL filtering. + """ + PROFILE_TYPE_UNSPECIFIED = 0 + THREAT_PREVENTION = 1 + CUSTOM_MIRRORING = 2 + CUSTOM_INTERCEPT = 3 + URL_FILTERING = 5 + + threat_prevention_profile: security_profile_group_threatprevention.ThreatPreventionProfile = proto.Field( + proto.MESSAGE, + number=7, + oneof="profile", + message=security_profile_group_threatprevention.ThreatPreventionProfile, + ) + custom_mirroring_profile: security_profile_group_mirroring.CustomMirroringProfile = proto.Field( + proto.MESSAGE, + number=9, + oneof="profile", + message=security_profile_group_mirroring.CustomMirroringProfile, + ) + custom_intercept_profile: security_profile_group_intercept.CustomInterceptProfile = proto.Field( + proto.MESSAGE, + number=10, + oneof="profile", + message=security_profile_group_intercept.CustomInterceptProfile, + ) + url_filtering_profile: security_profile_group_urlfiltering.UrlFilteringProfile = ( + proto.Field( + proto.MESSAGE, + number=12, + oneof="profile", + message=security_profile_group_urlfiltering.UrlFilteringProfile, + ) + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=5, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + type_: ProfileType = proto.Field( + proto.ENUM, + number=6, + enum=ProfileType, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_intercept.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_intercept.py new file mode 100644 index 000000000000..cc052fc93039 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_intercept.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "CustomInterceptProfile", + }, +) + + +class CustomInterceptProfile(proto.Message): + r"""CustomInterceptProfile defines in-band integration behavior + (intercept). It is used by firewall rules with an + APPLY_SECURITY_PROFILE_GROUP action. + + Attributes: + intercept_endpoint_group (str): + Required. The target InterceptEndpointGroup. + When a firewall rule with this security profile + attached matches a packet, the packet will be + intercepted to the location-local target in this + group. + """ + + intercept_endpoint_group: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_mirroring.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_mirroring.py new file mode 100644 index 000000000000..4bf3cddf1400 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_mirroring.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "CustomMirroringProfile", + }, +) + + +class CustomMirroringProfile(proto.Message): + r"""CustomMirroringProfile defines out-of-band integration + behavior (mirroring). It is used by mirroring rules with a + MIRROR action. + + Attributes: + mirroring_endpoint_group (str): + Required. Immutable. The target + MirroringEndpointGroup. When a mirroring rule + with this security profile attached matches a + packet, a replica will be mirrored to the + location-local target in this group. + """ + + mirroring_endpoint_group: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_service.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_service.py new file mode 100644 index 000000000000..c4c4ca11bd8e --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_service.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group as gcn_security_profile_group, +) + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "ListSecurityProfileGroupsRequest", + "ListSecurityProfileGroupsResponse", + "GetSecurityProfileGroupRequest", + "CreateSecurityProfileGroupRequest", + "UpdateSecurityProfileGroupRequest", + "DeleteSecurityProfileGroupRequest", + "ListSecurityProfilesRequest", + "ListSecurityProfilesResponse", + "GetSecurityProfileRequest", + "CreateSecurityProfileRequest", + "UpdateSecurityProfileRequest", + "DeleteSecurityProfileRequest", + }, +) + + +class ListSecurityProfileGroupsRequest(proto.Message): + r"""Request used with the ListSecurityProfileGroups method. + + Attributes: + parent (str): + Required. The project or organization and location from + which the SecurityProfileGroups should be listed, specified + in the format + ``projects|organizations/*/locations/{location}``. + page_size (int): + Maximum number of SecurityProfileGroups to + return per call. + page_token (str): + The value returned by the last + ``ListSecurityProfileGroupsResponse`` Indicates that this is + a continuation of a prior ``ListSecurityProfileGroups`` + call, and that the system should return the next page of + data. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSecurityProfileGroupsResponse(proto.Message): + r"""Response returned by the ListSecurityProfileGroups method. + + Attributes: + security_profile_groups (MutableSequence[google.cloud.network_security_v1alpha1.types.SecurityProfileGroup]): + List of SecurityProfileGroups resources. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``next_page_token`` is included. To get the + next set of results, call this method again using the value + of ``next_page_token`` as ``page_token``. + """ + + @property + def raw_page(self): + return self + + security_profile_groups: MutableSequence[ + gcn_security_profile_group.SecurityProfileGroup + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcn_security_profile_group.SecurityProfileGroup, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetSecurityProfileGroupRequest(proto.Message): + r"""Request used by the GetSecurityProfileGroup method. + + Attributes: + name (str): + Required. A name of the SecurityProfileGroup to get. Must be + in the format + ``projects|organizations/*/locations/{location}/securityProfileGroups/{security_profile_group}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSecurityProfileGroupRequest(proto.Message): + r"""Request used by the CreateSecurityProfileGroup method. 
+ + Attributes: + parent (str): + Required. The parent resource of the SecurityProfileGroup. + Must be in the format + ``projects|organizations/*/locations/{location}``. + security_profile_group_id (str): + Required. Short name of the SecurityProfileGroup resource to + be created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and underscores, + and should not start with a number. E.g. + "security_profile_group1". + security_profile_group (google.cloud.network_security_v1alpha1.types.SecurityProfileGroup): + Required. SecurityProfileGroup resource to be + created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + security_profile_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + security_profile_group: gcn_security_profile_group.SecurityProfileGroup = ( + proto.Field( + proto.MESSAGE, + number=3, + message=gcn_security_profile_group.SecurityProfileGroup, + ) + ) + + +class UpdateSecurityProfileGroupRequest(proto.Message): + r"""Request used by the UpdateSecurityProfileGroup method. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the SecurityProfileGroup resource by the + update. The fields specified in the update_mask are relative + to the resource, not the full request. A field will be + overwritten if it is in the mask. + security_profile_group (google.cloud.network_security_v1alpha1.types.SecurityProfileGroup): + Required. Updated SecurityProfileGroup + resource. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + security_profile_group: gcn_security_profile_group.SecurityProfileGroup = ( + proto.Field( + proto.MESSAGE, + number=2, + message=gcn_security_profile_group.SecurityProfileGroup, + ) + ) + + +class DeleteSecurityProfileGroupRequest(proto.Message): + r"""Request used by the DeleteSecurityProfileGroup method. + + Attributes: + name (str): + Required. A name of the SecurityProfileGroup to delete. Must + be in the format + ``projects|organizations/*/locations/{location}/securityProfileGroups/{security_profile_group}``. + etag (str): + Optional. If client provided etag is out of date, delete + will return FAILED_PRECONDITION error. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListSecurityProfilesRequest(proto.Message): + r"""Request used with the ListSecurityProfiles method. + + Attributes: + parent (str): + Required. The project or organization and location from + which the SecurityProfiles should be listed, specified in + the format + ``projects|organizations/*/locations/{location}``. + page_size (int): + Maximum number of SecurityProfiles to return + per call. + page_token (str): + The value returned by the last + ``ListSecurityProfilesResponse`` Indicates that this is a + continuation of a prior ``ListSecurityProfiles`` call, and + that the system should return the next page of data. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSecurityProfilesResponse(proto.Message): + r"""Response returned by the ListSecurityProfiles method. 
+ + Attributes: + security_profiles (MutableSequence[google.cloud.network_security_v1alpha1.types.SecurityProfile]): + List of SecurityProfile resources. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``next_page_token`` is included. To get the + next set of results, call this method again using the value + of ``next_page_token`` as ``page_token``. + """ + + @property + def raw_page(self): + return self + + security_profiles: MutableSequence[ + gcn_security_profile_group.SecurityProfile + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcn_security_profile_group.SecurityProfile, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetSecurityProfileRequest(proto.Message): + r"""Request used by the GetSecurityProfile method. + + Attributes: + name (str): + Required. A name of the SecurityProfile to get. Must be in + the format + ``projects|organizations/*/locations/{location}/securityProfiles/{security_profile_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSecurityProfileRequest(proto.Message): + r"""Request used by the CreateSecurityProfile method. + + Attributes: + parent (str): + Required. The parent resource of the SecurityProfile. Must + be in the format + ``projects|organizations/*/locations/{location}``. + security_profile_id (str): + Required. Short name of the SecurityProfile resource to be + created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and underscores, + and should not start with a number. E.g. + "security_profile1". + security_profile (google.cloud.network_security_v1alpha1.types.SecurityProfile): + Required. SecurityProfile resource to be + created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + security_profile_id: str = proto.Field( + proto.STRING, + number=2, + ) + security_profile: gcn_security_profile_group.SecurityProfile = proto.Field( + proto.MESSAGE, + number=3, + message=gcn_security_profile_group.SecurityProfile, + ) + + +class UpdateSecurityProfileRequest(proto.Message): + r"""Request used by the UpdateSecurityProfile method. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the SecurityProfile resource by the update. + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. + security_profile (google.cloud.network_security_v1alpha1.types.SecurityProfile): + Required. Updated SecurityProfile resource. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + security_profile: gcn_security_profile_group.SecurityProfile = proto.Field( + proto.MESSAGE, + number=2, + message=gcn_security_profile_group.SecurityProfile, + ) + + +class DeleteSecurityProfileRequest(proto.Message): + r"""Request used by the DeleteSecurityProfile method. + + Attributes: + name (str): + Required. A name of the SecurityProfile to delete. Must be + in the format + ``projects|organizations/*/locations/{location}/securityProfiles/{security_profile_id}``. + etag (str): + Optional. If client provided etag is out of date, delete + will return FAILED_PRECONDITION error. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_threatprevention.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_threatprevention.py new file mode 100644 index 000000000000..bd28c675215d --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_threatprevention.py @@ -0,0 +1,287 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "Severity", + "ThreatType", + "ThreatAction", + "Protocol", + "ThreatPreventionProfile", + "SeverityOverride", + "ThreatOverride", + "AntivirusOverride", + }, +) + + +class Severity(proto.Enum): + r"""Severity level. + + Values: + SEVERITY_UNSPECIFIED (0): + Severity level not specified. + INFORMATIONAL (1): + Suspicious events that do not pose an + immediate threat, but that are reported to call + attention to deeper problems that could possibly + exist. + LOW (2): + Warning-level threats that have very little + impact on an organization's infrastructure. They + usually require local or physical system access + and may often result in victim privacy issues + and information leakage. + MEDIUM (3): + Minor threats in which impact is minimized, + that do not compromise the target or exploits + that require an attacker to reside on the same + local network as the victim, affect only + non-standard configurations or obscure + applications, or provide very limited access. + HIGH (4): + Threats that have the ability to become + critical but have mitigating factors; for + example, they may be difficult to exploit, do + not result in elevated privileges, or do not + have a large victim pool. + CRITICAL (5): + Serious threats, such as those that affect + default installations of widely deployed + software, result in root compromise of servers, + and the exploit code is widely available to + attackers. The attacker usually does not need + any special authentication credentials or + knowledge about the individual victims and the + target does not need to be manipulated into + performing any special functions. + """ + SEVERITY_UNSPECIFIED = 0 + INFORMATIONAL = 1 + LOW = 2 + MEDIUM = 3 + HIGH = 4 + CRITICAL = 5 + + +class ThreatType(proto.Enum): + r"""Type of threat. + + Values: + THREAT_TYPE_UNSPECIFIED (0): + Type of threat not specified. + UNKNOWN (1): + Type of threat is not derivable from threat + ID. An override will be created for all types. 
+ Firewall will ignore overridden signature ID's + that don't exist in the specific type. + VULNERABILITY (2): + Threats related to system flaws that an + attacker might otherwise attempt to exploit. + ANTIVIRUS (3): + Threats related to viruses and malware found + in executables and file types. + SPYWARE (4): + Threats related to command-and-control (C2) + activity, where spyware on an infected client is + collecting data without the user's consent + and/or communicating with a remote attacker. + DNS (5): + Threats related to DNS. + """ + THREAT_TYPE_UNSPECIFIED = 0 + UNKNOWN = 1 + VULNERABILITY = 2 + ANTIVIRUS = 3 + SPYWARE = 4 + DNS = 5 + + +class ThreatAction(proto.Enum): + r"""Threat action override. + + Values: + THREAT_ACTION_UNSPECIFIED (0): + Threat action not specified. + DEFAULT_ACTION (4): + The default action (as specified by the + vendor) is taken. + ALLOW (1): + The packet matching this rule will be allowed + to transmit. + ALERT (2): + The packet matching this rule will be allowed to transmit, + but a threat_log entry will be sent to the consumer project. + DENY (3): + The packet matching this rule will be dropped, and a + threat_log entry will be sent to the consumer project. + """ + THREAT_ACTION_UNSPECIFIED = 0 + DEFAULT_ACTION = 4 + ALLOW = 1 + ALERT = 2 + DENY = 3 + + +class Protocol(proto.Enum): + r"""Antivirus protocol. + + Values: + PROTOCOL_UNSPECIFIED (0): + Protocol not specified. + SMTP (1): + SMTP protocol + SMB (2): + SMB protocol + POP3 (3): + POP3 protocol + IMAP (4): + IMAP protocol + HTTP2 (5): + HTTP2 protocol + HTTP (6): + HTTP protocol + FTP (7): + FTP protocol + """ + PROTOCOL_UNSPECIFIED = 0 + SMTP = 1 + SMB = 2 + POP3 = 3 + IMAP = 4 + HTTP2 = 5 + HTTP = 6 + FTP = 7 + + +class ThreatPreventionProfile(proto.Message): + r"""ThreatPreventionProfile defines an action for specific threat + signatures or severity levels. + + Attributes: + severity_overrides (MutableSequence[google.cloud.network_security_v1alpha1.types.SeverityOverride]): + Optional. Configuration for overriding + threats actions by severity match. + threat_overrides (MutableSequence[google.cloud.network_security_v1alpha1.types.ThreatOverride]): + Optional. Configuration for overriding threats actions by + threat_id match. If a threat is matched both by + configuration provided in severity_overrides and + threat_overrides, the threat_overrides action is applied. + antivirus_overrides (MutableSequence[google.cloud.network_security_v1alpha1.types.AntivirusOverride]): + Optional. Configuration for overriding + antivirus actions per protocol. + """ + + severity_overrides: MutableSequence["SeverityOverride"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SeverityOverride", + ) + threat_overrides: MutableSequence["ThreatOverride"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ThreatOverride", + ) + antivirus_overrides: MutableSequence["AntivirusOverride"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="AntivirusOverride", + ) + + +class SeverityOverride(proto.Message): + r"""Defines what action to take for a specific severity match. + + Attributes: + severity (google.cloud.network_security_v1alpha1.types.Severity): + Required. Severity level to match. + action (google.cloud.network_security_v1alpha1.types.ThreatAction): + Required. Threat action override. 
+ """ + + severity: "Severity" = proto.Field( + proto.ENUM, + number=1, + enum="Severity", + ) + action: "ThreatAction" = proto.Field( + proto.ENUM, + number=2, + enum="ThreatAction", + ) + + +class ThreatOverride(proto.Message): + r"""Defines what action to take for a specific threat_id match. + + Attributes: + threat_id (str): + Required. Vendor-specific ID of a threat to + override. + type_ (google.cloud.network_security_v1alpha1.types.ThreatType): + Output only. Type of the threat (read only). + action (google.cloud.network_security_v1alpha1.types.ThreatAction): + Required. Threat action override. For some + threat types, only a subset of actions applies. + """ + + threat_id: str = proto.Field( + proto.STRING, + number=1, + ) + type_: "ThreatType" = proto.Field( + proto.ENUM, + number=2, + enum="ThreatType", + ) + action: "ThreatAction" = proto.Field( + proto.ENUM, + number=3, + enum="ThreatAction", + ) + + +class AntivirusOverride(proto.Message): + r"""Defines what action to take for antivirus threats per + protocol. + + Attributes: + protocol (google.cloud.network_security_v1alpha1.types.Protocol): + Required. Protocol to match. + action (google.cloud.network_security_v1alpha1.types.ThreatAction): + Required. Threat action override. For some + threat types, only a subset of actions applies. + """ + + protocol: "Protocol" = proto.Field( + proto.ENUM, + number=1, + enum="Protocol", + ) + action: "ThreatAction" = proto.Field( + proto.ENUM, + number=2, + enum="ThreatAction", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_urlfiltering.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_urlfiltering.py new file mode 100644 index 000000000000..478b0f7c7119 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/security_profile_group_urlfiltering.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "UrlFilteringProfile", + "UrlFilter", + }, +) + + +class UrlFilteringProfile(proto.Message): + r"""UrlFilteringProfile defines filters based on URL. + + Attributes: + url_filters (MutableSequence[google.cloud.network_security_v1alpha1.types.UrlFilter]): + Optional. The list of filtering configs in + which each config defines an action to take for + some URL match. + """ + + url_filters: MutableSequence["UrlFilter"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="UrlFilter", + ) + + +class UrlFilter(proto.Message): + r"""A URL filter defines an action to take for some URL match. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filtering_action (google.cloud.network_security_v1alpha1.types.UrlFilter.UrlFilteringAction): + Required. The action taken when this filter + is applied. + urls (MutableSequence[str]): + Required. The list of strings that a URL must + match with for this filter to be applied. + priority (int): + Required. The priority of this filter within + the URL Filtering Profile. Lower integers + indicate higher priorities. The priority of a + filter must be unique within a URL Filtering + Profile. + + This field is a member of `oneof`_ ``_priority``. + """ + + class UrlFilteringAction(proto.Enum): + r"""Action to be taken when a URL matches a filter. + + Values: + URL_FILTERING_ACTION_UNSPECIFIED (0): + Filtering action not specified. + ALLOW (1): + The connection matching this filter will be + allowed to transmit. + DENY (2): + The connection matching this filter will be + dropped. + """ + URL_FILTERING_ACTION_UNSPECIFIED = 0 + ALLOW = 1 + DENY = 2 + + filtering_action: UrlFilteringAction = proto.Field( + proto.ENUM, + number=1, + enum=UrlFilteringAction, + ) + urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + priority: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/server_tls_policy.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/server_tls_policy.py new file mode 100644 index 000000000000..564036af6658 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/server_tls_policy.py @@ -0,0 +1,394 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1alpha1.types import tls + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "ServerTlsPolicy", + "ListServerTlsPoliciesRequest", + "ListServerTlsPoliciesResponse", + "GetServerTlsPolicyRequest", + "CreateServerTlsPolicyRequest", + "UpdateServerTlsPolicyRequest", + "DeleteServerTlsPolicyRequest", + }, +) + + +class ServerTlsPolicy(proto.Message): + r"""ServerTlsPolicy is a resource that specifies how a server should + authenticate incoming requests. This resource itself does not affect + configuration unless it is attached to a target HTTPS proxy or + endpoint config selector resource. 
+ + ServerTlsPolicy in the form accepted by Application Load Balancers + can be attached only to TargetHttpsProxy with an ``EXTERNAL``, + ``EXTERNAL_MANAGED`` or ``INTERNAL_MANAGED`` load balancing scheme. + Traffic Director compatible ServerTlsPolicies can be attached to + EndpointPolicy and TargetHttpsProxy with Traffic Director + ``INTERNAL_SELF_MANAGED`` load balancing scheme. + + Attributes: + name (str): + Required. Name of the ServerTlsPolicy resource. It matches + the pattern + ``projects/*/locations/{location}/serverTlsPolicies/{server_tls_policy}`` + description (str): + Free-text description of the resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was updated. + labels (MutableMapping[str, str]): + Set of label tags associated with the + resource. + allow_open (bool): + This field applies only for Traffic Director policies. It is + must be set to false for Application Load Balancer policies. + + Determines if server allows plaintext connections. If set to + true, server allows plain text connections. By default, it + is set to false. This setting is not exclusive of other + encryption modes. For example, if ``allow_open`` and + ``mtls_policy`` are set, server allows both plain text and + mTLS connections. See documentation of other encryption + modes to confirm compatibility. + + Consider using it if you wish to upgrade in place your + deployment to TLS while having mixed TLS and non-TLS traffic + reaching port :80. + server_certificate (google.cloud.network_security_v1alpha1.types.CertificateProvider): + Optional if policy is to be used with Traffic Director. For + Application Load Balancers must be empty. + + Defines a mechanism to provision server identity (public and + private keys). Cannot be combined with ``allow_open`` as a + permissive mode that allows both plain text and TLS is not + supported. + mtls_policy (google.cloud.network_security_v1alpha1.types.ServerTlsPolicy.MTLSPolicy): + This field is required if the policy is used with + Application Load Balancers. This field can be empty for + Traffic Director. + + Defines a mechanism to provision peer validation + certificates for peer to peer authentication (Mutual TLS - + mTLS). If not specified, client certificate will not be + requested. The connection is treated as TLS and not mTLS. If + ``allow_open`` and ``mtls_policy`` are set, server allows + both plain text and mTLS connections. + """ + + class MTLSPolicy(proto.Message): + r"""Specification of the MTLSPolicy. + + Attributes: + client_validation_mode (google.cloud.network_security_v1alpha1.types.ServerTlsPolicy.MTLSPolicy.ClientValidationMode): + When the client presents an invalid certificate or no + certificate to the load balancer, the + ``client_validation_mode`` specifies how the client + connection is handled. + + Required if the policy is to be used with the Application + Load Balancers. For Traffic Director it must be empty. + client_validation_ca (MutableSequence[google.cloud.network_security_v1alpha1.types.ValidationCA]): + Required if the policy is to be used with + Traffic Director. For Application Load Balancers + it must be empty. + + Defines the mechanism to obtain the Certificate + Authority certificate to validate the client + certificate. + client_validation_trust_config (str): + Reference to the TrustConfig from + certificatemanager.googleapis.com namespace. 
+ + If specified, the chain validation will be + performed against certificates configured in the + given TrustConfig. + + Allowed only if the policy is to be used with + Application Load Balancers. + """ + + class ClientValidationMode(proto.Enum): + r"""Mutual TLS certificate validation mode. + + Values: + CLIENT_VALIDATION_MODE_UNSPECIFIED (0): + Not allowed. + ALLOW_INVALID_OR_MISSING_CLIENT_CERT (1): + Allow connection even if certificate chain + validation of the client certificate failed or + no client certificate was presented. The proof + of possession of the private key is always + checked if client certificate was presented. + This mode requires the backend to implement + processing of data extracted from a client + certificate to authenticate the peer, or to + reject connections if the client certificate + fingerprint is missing. + REJECT_INVALID (2): + Require a client certificate and allow connection to the + backend only if validation of the client certificate passed. + + If set, requires a reference to non-empty TrustConfig + specified in ``client_validation_trust_config``. + """ + CLIENT_VALIDATION_MODE_UNSPECIFIED = 0 + ALLOW_INVALID_OR_MISSING_CLIENT_CERT = 1 + REJECT_INVALID = 2 + + client_validation_mode: "ServerTlsPolicy.MTLSPolicy.ClientValidationMode" = ( + proto.Field( + proto.ENUM, + number=2, + enum="ServerTlsPolicy.MTLSPolicy.ClientValidationMode", + ) + ) + client_validation_ca: MutableSequence[tls.ValidationCA] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=tls.ValidationCA, + ) + client_validation_trust_config: str = proto.Field( + proto.STRING, + number=4, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + allow_open: bool = proto.Field( + proto.BOOL, + number=6, + ) + server_certificate: tls.CertificateProvider = proto.Field( + proto.MESSAGE, + number=7, + message=tls.CertificateProvider, + ) + mtls_policy: MTLSPolicy = proto.Field( + proto.MESSAGE, + number=8, + message=MTLSPolicy, + ) + + +class ListServerTlsPoliciesRequest(proto.Message): + r"""Request used by the ListServerTlsPolicies method. + + Attributes: + parent (str): + Required. The project and location from which the + ServerTlsPolicies should be listed, specified in the format + ``projects/*/locations/{location}``. + page_size (int): + Maximum number of ServerTlsPolicies to return + per call. + page_token (str): + The value returned by the last + ``ListServerTlsPoliciesResponse`` Indicates that this is a + continuation of a prior ``ListServerTlsPolicies`` call, and + that the system should return the next page of data. + return_partial_success (bool): + Optional. Setting this field to ``true`` will opt the + request into returning the resources that are reachable, and + into including the names of those that were unreachable in + the [ListServerTlsPoliciesResponse.unreachable] field. This + can only be ``true`` when reading across collections e.g. + when ``parent`` is set to + ``"projects/example/locations/-"``. 
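For review context, a minimal construction sketch for a ServerTlsPolicy that requires validated client certificates; the resource name and the TrustConfig reference are hypothetical placeholders:

from google.cloud.network_security_v1alpha1.types import server_tls_policy

policy = server_tls_policy.ServerTlsPolicy(
    name="projects/my-project/locations/us-central1/serverTlsPolicies/my-policy",  # hypothetical
    description="mTLS policy for the external load balancer",
    allow_open=False,  # if True alongside mtls_policy, both plain text and mTLS are allowed
    mtls_policy=server_tls_policy.ServerTlsPolicy.MTLSPolicy(
        client_validation_mode=(
            server_tls_policy.ServerTlsPolicy.MTLSPolicy.ClientValidationMode.REJECT_INVALID
        ),
        # REJECT_INVALID requires a non-empty TrustConfig reference.
        client_validation_trust_config="projects/1234567890/locations/global/trustConfigs/my-trust-config",  # hypothetical
    ),
)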
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListServerTlsPoliciesResponse(proto.Message): + r"""Response returned by the ListServerTlsPolicies method. + + Attributes: + server_tls_policies (MutableSequence[google.cloud.network_security_v1alpha1.types.ServerTlsPolicy]): + List of ServerTlsPolicy resources. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``next_page_token`` is included. To get the + next set of results, call this method again using the value + of ``next_page_token`` as ``page_token``. + unreachable (MutableSequence[str]): + Unreachable resources. Populated when the request opts into + ``return_partial_success`` and reading across collections + e.g. when attempting to list all resources across all + supported locations. + """ + + @property + def raw_page(self): + return self + + server_tls_policies: MutableSequence["ServerTlsPolicy"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ServerTlsPolicy", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetServerTlsPolicyRequest(proto.Message): + r"""Request used by the GetServerTlsPolicy method. + + Attributes: + name (str): + Required. A name of the ServerTlsPolicy to get. Must be in + the format + ``projects/*/locations/{location}/serverTlsPolicies/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateServerTlsPolicyRequest(proto.Message): + r"""Request used by the CreateServerTlsPolicy method. + + Attributes: + parent (str): + Required. The parent resource of the ServerTlsPolicy. Must + be in the format ``projects/*/locations/{location}``. + server_tls_policy_id (str): + Required. Short name of the ServerTlsPolicy resource to be + created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and underscores, + and should not start with a number. E.g. + "server_mtls_policy". + server_tls_policy (google.cloud.network_security_v1alpha1.types.ServerTlsPolicy): + Required. ServerTlsPolicy resource to be + created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + server_tls_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + server_tls_policy: "ServerTlsPolicy" = proto.Field( + proto.MESSAGE, + number=3, + message="ServerTlsPolicy", + ) + + +class UpdateServerTlsPolicyRequest(proto.Message): + r"""Request used by UpdateServerTlsPolicy method. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the ServerTlsPolicy resource by the update. + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + server_tls_policy (google.cloud.network_security_v1alpha1.types.ServerTlsPolicy): + Required. Updated ServerTlsPolicy resource. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + server_tls_policy: "ServerTlsPolicy" = proto.Field( + proto.MESSAGE, + number=2, + message="ServerTlsPolicy", + ) + + +class DeleteServerTlsPolicyRequest(proto.Message): + r"""Request used by the DeleteServerTlsPolicy method. + + Attributes: + name (str): + Required. A name of the ServerTlsPolicy to delete. Must be + in the format + ``projects/*/locations/{location}/serverTlsPolicies/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/sse_gateway.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/sse_gateway.py new file mode 100644 index 000000000000..9322456241b6 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/sse_gateway.py @@ -0,0 +1,647 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "PartnerSSEGateway", + "ListPartnerSSEGatewaysRequest", + "ListPartnerSSEGatewaysResponse", + "GetPartnerSSEGatewayRequest", + "CreatePartnerSSEGatewayRequest", + "DeletePartnerSSEGatewayRequest", + "UpdatePartnerSSEGatewayRequest", + "SSEGatewayReference", + "ListSSEGatewayReferencesRequest", + "ListSSEGatewayReferencesResponse", + "GetSSEGatewayReferenceRequest", + }, +) + + +class PartnerSSEGateway(proto.Message): + r"""Message describing PartnerSSEGateway object + + Attributes: + name (str): + Immutable. name of resource + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + labels (MutableMapping[str, str]): + Optional. Labels as key value pairs + sse_vpc_subnet_range (str): + Output only. Subnet range of the subnet where partner + traffic is routed. This field is deprecated. Use + sse_subnet_range instead. + sse_vpc_target_ip (str): + Output only. This is the IP where the partner traffic should + be routed to. This field is deprecated. Use sse_target_ip + instead. + sse_gateway_reference_id (str): + Required. ID of the SSEGatewayReference that + pairs with this PartnerSSEGateway + sse_bgp_ips (MutableSequence[str]): + Output only. IP of SSE BGP + sse_bgp_asn (int): + Output only. ASN of SSE BGP + partner_vpc_subnet_range (str): + Optional. Subnet range of the partner_vpc This field is + deprecated. Use partner_subnet_range instead. + partner_sse_realm (str): + Output only. 
name of PartnerSSERealm owning + the PartnerSSEGateway + sse_subnet_range (str): + Optional. Subnet range where SSE GW instances + are deployed. Default value is set to + "100.88.255.0/24". The CIDR suffix should be + less than or equal to 25. + sse_target_ip (str): + Output only. Target IP that belongs to sse_subnet_range + where partner should send the traffic to reach the customer + networks. + partner_subnet_range (str): + Optional. Subnet range of the partner-owned + subnet. + vni (int): + Optional. Virtual Network Identifier to use + in NCG. Today the only partner that depends on + it is Symantec. + symantec_options (google.cloud.network_security_v1alpha1.types.PartnerSSEGateway.PartnerSSEGatewaySymantecOptions): + Optional. Required iff Partner is Symantec. + sse_project (str): + Output only. The project owning partner_facing_network. Only + filled for PartnerSSEGateways associated with Symantec + today. + sse_network (str): + Output only. The ID of the network in sse_project containing + sse_subnet_range. This is also known as the + partnerFacingNetwork. Only filled for PartnerSSEGateways + associated with Symantec today. + partner_sse_environment (str): + Output only. Full URI of the partner + environment this PartnerSSEGateway is connected + to. Filled from the customer SSEGateway, and + only for PartnerSSEGateways associated with + Symantec today. + country (str): + Output only. ISO-3166 alpha 2 country code + used for localization. Filled from the customer + SSEGateway, and only for PartnerSSEGateways + associated with Symantec today. + timezone (str): + Output only. tzinfo identifier used for + localization. Filled from the customer + SSEGateway, and only for PartnerSSEGateways + associated with Symantec today. + capacity_bps (int): + Output only. Copied from the associated NCC + resource in Symantec NCCGW flows. Used by + Symantec API. + state (google.cloud.network_security_v1alpha1.types.PartnerSSEGateway.State): + Output only. State of the gateway. + prober_subnet_ranges (MutableSequence[str]): + Output only. Subnet ranges for Google-issued + probe packets. It's populated only for Prisma + Access partners. + """ + + class State(proto.Enum): + r"""State of the gateway. + + Values: + STATE_UNSPECIFIED (0): + No state specified. This should not be used. + CUSTOMER_ATTACHED (1): + Attached to a customer. This is the default + state when a gateway is successfully created. + CUSTOMER_DETACHED (2): + No longer attached to a customer. This state + arises when the customer attachment is deleted. + """ + STATE_UNSPECIFIED = 0 + CUSTOMER_ATTACHED = 1 + CUSTOMER_DETACHED = 2 + + class PartnerSSEGatewaySymantecOptions(proto.Message): + r"""Options specific to gateways connected to Symantec. + + Attributes: + symantec_location_uuid (str): + Output only. UUID of the Symantec Location + created on the customer's behalf. + symantec_site_target_host (str): + Optional. Target for the NCGs to send traffic + to on the Symantec side. Only supports IP + address today. + symantec_site (str): + Output only. Symantec data center identifier + that this SSEGW will connect to. Filled from the + customer SSEGateway, and only for + PartnerSSEGateways associated with Symantec + today. 
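For orientation, a minimal sketch of constructing a PartnerSSEGateway with the Symantec-specific options defined above; all identifiers, ranges, and addresses are hypothetical placeholders, and most remaining fields are output only and set by the service:

from google.cloud.network_security_v1alpha1.types import sse_gateway

gateway = sse_gateway.PartnerSSEGateway(
    sse_gateway_reference_id="my-sse-gateway-reference",  # hypothetical reference ID
    sse_subnet_range="100.88.255.0/24",  # documented default; CIDR suffix must be <= 25
    partner_subnet_range="10.128.0.0/24",  # hypothetical partner-owned subnet
    vni=4096,  # only needed for partners that depend on it (currently Symantec)
    symantec_options=sse_gateway.PartnerSSEGateway.PartnerSSEGatewaySymantecOptions(
        symantec_site_target_host="203.0.113.10",  # hypothetical target IP
    ),
    labels={"env": "test"},
)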
+ """ + + symantec_location_uuid: str = proto.Field( + proto.STRING, + number=1, + ) + symantec_site_target_host: str = proto.Field( + proto.STRING, + number=2, + ) + symantec_site: str = proto.Field( + proto.STRING, + number=3, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + sse_vpc_subnet_range: str = proto.Field( + proto.STRING, + number=5, + ) + sse_vpc_target_ip: str = proto.Field( + proto.STRING, + number=6, + ) + sse_gateway_reference_id: str = proto.Field( + proto.STRING, + number=7, + ) + sse_bgp_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + sse_bgp_asn: int = proto.Field( + proto.INT32, + number=9, + ) + partner_vpc_subnet_range: str = proto.Field( + proto.STRING, + number=11, + ) + partner_sse_realm: str = proto.Field( + proto.STRING, + number=12, + ) + sse_subnet_range: str = proto.Field( + proto.STRING, + number=17, + ) + sse_target_ip: str = proto.Field( + proto.STRING, + number=18, + ) + partner_subnet_range: str = proto.Field( + proto.STRING, + number=19, + ) + vni: int = proto.Field( + proto.INT32, + number=20, + ) + symantec_options: PartnerSSEGatewaySymantecOptions = proto.Field( + proto.MESSAGE, + number=21, + message=PartnerSSEGatewaySymantecOptions, + ) + sse_project: str = proto.Field( + proto.STRING, + number=22, + ) + sse_network: str = proto.Field( + proto.STRING, + number=23, + ) + partner_sse_environment: str = proto.Field( + proto.STRING, + number=24, + ) + country: str = proto.Field( + proto.STRING, + number=25, + ) + timezone: str = proto.Field( + proto.STRING, + number=26, + ) + capacity_bps: int = proto.Field( + proto.INT64, + number=28, + ) + state: State = proto.Field( + proto.ENUM, + number=29, + enum=State, + ) + prober_subnet_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=30, + ) + + +class ListPartnerSSEGatewaysRequest(proto.Message): + r"""Message for requesting list of PartnerSSEGateways + + Attributes: + parent (str): + Required. Parent value for + ListPartnerSSEGatewaysRequest + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListPartnerSSEGatewaysResponse(proto.Message): + r"""Message for response to listing PartnerSSEGateways + + Attributes: + partner_sse_gateways (MutableSequence[google.cloud.network_security_v1alpha1.types.PartnerSSEGateway]): + The list of PartnerSSEGateway + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + partner_sse_gateways: MutableSequence["PartnerSSEGateway"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PartnerSSEGateway", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetPartnerSSEGatewayRequest(proto.Message): + r"""Message for getting a PartnerSSEGateway + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreatePartnerSSEGatewayRequest(proto.Message): + r"""Message for creating a PartnerSSEGateway + + Attributes: + parent (str): + Required. Value for parent. + partner_sse_gateway_id (str): + Required. Id of the requesting object If auto-generating Id + server-side, remove this field and partner_sse_gateway_id + from the method_signature of Create RPC + partner_sse_gateway (google.cloud.network_security_v1alpha1.types.PartnerSSEGateway): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + partner_sse_gateway_id: str = proto.Field( + proto.STRING, + number=2, + ) + partner_sse_gateway: "PartnerSSEGateway" = proto.Field( + proto.MESSAGE, + number=3, + message="PartnerSSEGateway", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeletePartnerSSEGatewayRequest(proto.Message): + r"""Message for deleting a PartnerSSEGateway + + Attributes: + name (str): + Required. Name of the resource + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdatePartnerSSEGatewayRequest(proto.Message): + r"""Message for deleting a PartnerSSEGateway + + Attributes: + partner_sse_gateway (google.cloud.network_security_v1alpha1.types.PartnerSSEGateway): + Required. The resource being created + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + partner_sse_gateway: "PartnerSSEGateway" = proto.Field( + proto.MESSAGE, + number=1, + message="PartnerSSEGateway", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class SSEGatewayReference(proto.Message): + r"""Message describing SSEGatewayReference object + + Attributes: + name (str): + Immutable. name of resource + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + labels (MutableMapping[str, str]): + Optional. Labels as key value pairs + partner_sse_realm (str): + Output only. PartnerSSERealm owning the + PartnerSSEGateway that this SSEGateway intends + to connect with + prober_subnet_ranges (MutableSequence[str]): + Output only. Subnet ranges for Google probe + packets. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + partner_sse_realm: str = proto.Field( + proto.STRING, + number=5, + ) + prober_subnet_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + + +class ListSSEGatewayReferencesRequest(proto.Message): + r"""Message for requesting list of SSEGatewayReferences + + Attributes: + parent (str): + Required. Parent value for + ListSSEGatewayReferencesRequest + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. 
+ filter (str): + Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListSSEGatewayReferencesResponse(proto.Message): + r"""Message for response to listing SSEGatewayReferences + + Attributes: + sse_gateway_references (MutableSequence[google.cloud.network_security_v1alpha1.types.SSEGatewayReference]): + The list of SSEGatewayReference + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + sse_gateway_references: MutableSequence[ + "SSEGatewayReference" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SSEGatewayReference", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetSSEGatewayReferenceRequest(proto.Message): + r"""Message for getting a SSEGatewayReference + + Attributes: + name (str): + Required. Name of the resource + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/sse_realm.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/sse_realm.py new file mode 100644 index 000000000000..44c7aebb0087 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/sse_realm.py @@ -0,0 +1,1073 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1alpha1", + manifest={ + "SACRealm", + "ListSACRealmsRequest", + "ListSACRealmsResponse", + "GetSACRealmRequest", + "CreateSACRealmRequest", + "DeleteSACRealmRequest", + "SACAttachment", + "ListSACAttachmentsRequest", + "ListSACAttachmentsResponse", + "GetSACAttachmentRequest", + "CreateSACAttachmentRequest", + "DeleteSACAttachmentRequest", + "PartnerSSERealm", + "ListPartnerSSERealmsRequest", + "ListPartnerSSERealmsResponse", + "GetPartnerSSERealmRequest", + "CreatePartnerSSERealmRequest", + "DeletePartnerSSERealmRequest", + }, +) + + +class SACRealm(proto.Message): + r"""Represents a Secure Access Connect (SAC) realm resource. + + A Secure Access Connect realm establishes a connection between + your Google Cloud project and an SSE service. 
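For review context, a minimal sketch of a SACRealm configured for Symantec Cloud SWG, using the security service enum and nested options defined below; the project number and secret name in the secret path are hypothetical:

from google.cloud.network_security_v1alpha1.types import sse_realm

realm = sse_realm.SACRealm(
    security_service=sse_realm.SACRealm.SecurityService.SYMANTEC_CLOUD_SWG,
    symantec_options=sse_realm.SACRealm.SACRealmSymantecOptions(
        # Stored as a secret URI; the P4SA account needs read access to this secret.
        secret_path="projects/123456789/secrets/symantec-api-key",  # hypothetical secret
    ),
    labels={"env": "test"},
)
# For a Prisma Access realm, security_service would instead be
# PALO_ALTO_PRISMA_ACCESS and symantec_options would be omitted.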
+ + Attributes: + name (str): + Identifier. Resource name, in the form + ``projects/{project}/locations/global/sacRealms/{sacRealm}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the realm was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the realm was + last updated. + labels (MutableMapping[str, str]): + Optional. Optional list of labels applied to + the resource. + security_service (google.cloud.network_security_v1alpha1.types.SACRealm.SecurityService): + Immutable. SSE service provider associated + with the realm. + pairing_key (google.cloud.network_security_v1alpha1.types.SACRealm.PairingKey): + Output only. Key to be shared with SSE + service provider during pairing. + state (google.cloud.network_security_v1alpha1.types.SACRealm.State): + Output only. State of the realm. + symantec_options (google.cloud.network_security_v1alpha1.types.SACRealm.SACRealmSymantecOptions): + Optional. Configuration required for Symantec + realms. + """ + + class SecurityService(proto.Enum): + r"""SSE service provider + + Values: + SECURITY_SERVICE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + PALO_ALTO_PRISMA_ACCESS (1): + `Palo Alto Networks Prisma + Access `__. + SYMANTEC_CLOUD_SWG (2): + Symantec Cloud SWG. + """ + SECURITY_SERVICE_UNSPECIFIED = 0 + PALO_ALTO_PRISMA_ACCESS = 1 + SYMANTEC_CLOUD_SWG = 2 + + class State(proto.Enum): + r"""State of the realm. + + Values: + STATE_UNSPECIFIED (0): + No state specified. This should not be used. + PENDING_PARTNER_ATTACHMENT (7): + Has never been attached to a partner. + Used only for Prisma Access. + PARTNER_ATTACHED (1): + Currently attached to a partner. + PARTNER_DETACHED (2): + Was once attached to a partner but has been + detached. + KEY_EXPIRED (3): + Is not attached to a partner and has an + expired pairing key. Used only for Prisma + Access. + """ + STATE_UNSPECIFIED = 0 + PENDING_PARTNER_ATTACHMENT = 7 + PARTNER_ATTACHED = 1 + PARTNER_DETACHED = 2 + KEY_EXPIRED = 3 + + class PairingKey(proto.Message): + r"""Key to be shared with SSE service provider to establish + global handshake. + + Attributes: + key (str): + Output only. Key value. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp in UTC of when this + resource is considered expired. It expires 7 + days after creation. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + class SACRealmSymantecOptions(proto.Message): + r"""Fields specific to realms using Symantec Cloud SWG. + + Attributes: + available_symantec_sites (MutableSequence[str]): + Output only. Symantec site IDs which the user + can choose to connect to. + secret_path (str): + Optional. API Key used to call Symantec APIs on the user's + behalf. Required if using Symantec Cloud SWG. P4SA account + needs permissions granted to read this secret. + + A secret ID, secret name, or secret URI can be specified, + but it will be parsed and stored as a secret URI in the form + ``projects/{project_number}/secrets/my-secret``. + symantec_connection_state (google.cloud.network_security_v1alpha1.types.SACRealm.SACRealmSymantecOptions.SymantecConnectionState): + Output only. Connection status to Symantec + API. + """ + + class SymantecConnectionState(proto.Enum): + r"""Connection status to Symantec API. 
+ + Values: + SYMANTEC_CONNECTION_STATE_UNSPECIFIED (0): + No state specified. This should not be used. + SUCCEEDED (1): + Successfully made a request to Symantec API. + READ_SECRET_FAILED (2): + Cannot access the API key in the provided ``secret_path``. + REQUEST_TO_SYMANTEC_FAILED (3): + Failed to get a successful response from + Symantec API due to an invalid API key or + Symantec API unavailability. + """ + SYMANTEC_CONNECTION_STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + READ_SECRET_FAILED = 2 + REQUEST_TO_SYMANTEC_FAILED = 3 + + available_symantec_sites: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + secret_path: str = proto.Field( + proto.STRING, + number=4, + ) + symantec_connection_state: "SACRealm.SACRealmSymantecOptions.SymantecConnectionState" = proto.Field( + proto.ENUM, + number=5, + enum="SACRealm.SACRealmSymantecOptions.SymantecConnectionState", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + security_service: SecurityService = proto.Field( + proto.ENUM, + number=5, + enum=SecurityService, + ) + pairing_key: PairingKey = proto.Field( + proto.MESSAGE, + number=6, + message=PairingKey, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + symantec_options: SACRealmSymantecOptions = proto.Field( + proto.MESSAGE, + number=8, + message=SACRealmSymantecOptions, + ) + + +class ListSACRealmsRequest(proto.Message): + r"""Request for ``ListSACRealms`` method. + + Attributes: + parent (str): + Required. The parent, in the form + ``projects/{project}/locations/global``. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. An expression that filters the list + of results. + order_by (str): + Optional. Sort the results by a certain + order. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListSACRealmsResponse(proto.Message): + r"""Response for ``ListSACRealms`` method. + + Attributes: + sac_realms (MutableSequence[google.cloud.network_security_v1alpha1.types.SACRealm]): + The list of SACRealms. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + sac_realms: MutableSequence["SACRealm"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SACRealm", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetSACRealmRequest(proto.Message): + r"""Request for ``GetSACRealm`` method. + + Attributes: + name (str): + Required. 
Name of the resource, in the form + ``projects/{project}/locations/global/sacRealms/{sacRealm}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSACRealmRequest(proto.Message): + r"""Request for ``CreateSACRealm`` method. + + Attributes: + parent (str): + Required. The parent, in the form + ``projects/{project}/locations/global``. + sac_realm_id (str): + Required. ID of the created realm. The ID must be 1-63 + characters long, and comply with RFC1035. Specifically, it + must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + sac_realm (google.cloud.network_security_v1alpha1.types.SACRealm): + Required. The resource being created. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + sac_realm_id: str = proto.Field( + proto.STRING, + number=2, + ) + sac_realm: "SACRealm" = proto.Field( + proto.MESSAGE, + number=3, + message="SACRealm", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteSACRealmRequest(proto.Message): + r"""Request for ``DeleteSACRealm`` method. + + Attributes: + name (str): + Required. Name of the resource, in the form + ``projects/{project}/locations/global/sacRealms/{sacRealm}``. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SACAttachment(proto.Message): + r"""Represents a Secure Access Connect (SAC) attachment resource. + + A Secure Access Connect attachment enables NCC Gateway to + process traffic with an SSE product. + + Attributes: + name (str): + Identifier. Resource name, in the form + ``projects/{project}/locations/{location}/sacAttachments/{sac_attachment}``. 
+ create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the attachment + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the attachment + was last updated. + labels (MutableMapping[str, str]): + Optional. Optional list of labels applied to + the resource. + sac_realm (str): + Required. SAC Realm which owns the attachment. This can be + input as an ID or a full resource name. The output always + has the form + ``projects/{project_number}/locations/{location}/sacRealms/{sac_realm}``. + ncc_gateway (str): + Required. NCC Gateway associated with the attachment. This + can be input as an ID or a full resource name. The output + always has the form + ``projects/{project_number}/locations/{location}/spokes/{ncc_gateway}``. + country (str): + Optional. Case-insensitive ISO-3166 alpha-2 + country code used for localization. Only valid + for Symantec attachments. + time_zone (str): + Optional. Case-sensitive tzinfo identifier + used for localization. Only valid for Symantec + attachments. + symantec_options (google.cloud.network_security_v1alpha1.types.SACAttachment.SACAttachmentSymantecOptions): + Optional. Configuration required for Symantec + attachments. + state (google.cloud.network_security_v1alpha1.types.SACAttachment.State): + Output only. State of the attachment. + """ + + class State(proto.Enum): + r"""State of the attachment. + + Values: + STATE_UNSPECIFIED (0): + No state specified. This should not be used. + PENDING_PARTNER_ATTACHMENT (1): + Has never been attached to a partner. + PARTNER_ATTACHED (2): + Currently attached to a partner. + PARTNER_DETACHED (3): + Was once attached to a partner but has been + detached. + """ + STATE_UNSPECIFIED = 0 + PENDING_PARTNER_ATTACHMENT = 1 + PARTNER_ATTACHED = 2 + PARTNER_DETACHED = 3 + + class SACAttachmentSymantecOptions(proto.Message): + r"""Fields specific to attachments associated with Symantec Cloud + SWG. + + Attributes: + symantec_site (str): + Immutable. Symantec data center identifier + that this attachment will connect to. + symantec_location_name (str): + Immutable. Name to be used when creating a + location on the customer's behalf in Symantec's + Location API. Not to be confused with Google + Cloud locations. + """ + + symantec_site: str = proto.Field( + proto.STRING, + number=1, + ) + symantec_location_name: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + sac_realm: str = proto.Field( + proto.STRING, + number=5, + ) + ncc_gateway: str = proto.Field( + proto.STRING, + number=6, + ) + country: str = proto.Field( + proto.STRING, + number=7, + ) + time_zone: str = proto.Field( + proto.STRING, + number=8, + ) + symantec_options: SACAttachmentSymantecOptions = proto.Field( + proto.MESSAGE, + number=9, + message=SACAttachmentSymantecOptions, + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + + +class ListSACAttachmentsRequest(proto.Message): + r"""Request for ``ListSACAttachments`` method. + + Attributes: + parent (str): + Required. The parent, in the form + ``projects/{project}/locations/{location}``. 
+ page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. An expression that filters the list + of results. + order_by (str): + Optional. Sort the results by a certain + order. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListSACAttachmentsResponse(proto.Message): + r"""Response for ``ListSACAttachments`` method. + + Attributes: + sac_attachments (MutableSequence[google.cloud.network_security_v1alpha1.types.SACAttachment]): + The list of SACAttachments. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + sac_attachments: MutableSequence["SACAttachment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SACAttachment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetSACAttachmentRequest(proto.Message): + r"""Request for ``GetSACAttachment`` method. + + Attributes: + name (str): + Required. Name of the resource, in the form + ``projects/{project}/locations/{location}/sacAttachments/{sac_attachment}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSACAttachmentRequest(proto.Message): + r"""Request for ``CreateSACAttachment`` method. + + Attributes: + parent (str): + Required. The parent, in the form + ``projects/{project}/locations/{location}``. + sac_attachment_id (str): + Required. ID of the created attachment. The ID must be 1-63 + characters long, and comply with RFC1035. Specifically, it + must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + sac_attachment (google.cloud.network_security_v1alpha1.types.SACAttachment): + Required. The resource being created. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
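A minimal sketch of creating a SAC attachment with the request defined above; the resource IDs, Symantec site, country, and time zone values are hypothetical placeholders:

import uuid

from google.cloud.network_security_v1alpha1.types import sse_realm

request = sse_realm.CreateSACAttachmentRequest(
    parent="projects/my-project/locations/us-central1",  # hypothetical parent
    sac_attachment_id="my-attachment",
    sac_attachment=sse_realm.SACAttachment(
        sac_realm="my-realm",          # ID or full resource name, per the field docs
        ncc_gateway="my-ncc-gateway",  # ID or full resource name, per the field docs
        country="US",                  # Symantec attachments only
        time_zone="America/New_York",  # Symantec attachments only
        symantec_options=sse_realm.SACAttachment.SACAttachmentSymantecOptions(
            symantec_site="example-site",        # hypothetical Symantec site ID
            symantec_location_name="hq-office",  # hypothetical location name
        ),
    ),
    request_id=str(uuid.uuid4()),  # reuse on retries for de-duplication
)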
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + sac_attachment_id: str = proto.Field( + proto.STRING, + number=2, + ) + sac_attachment: "SACAttachment" = proto.Field( + proto.MESSAGE, + number=3, + message="SACAttachment", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteSACAttachmentRequest(proto.Message): + r"""Request for ``DeleteSACAttachment`` method. + + Attributes: + name (str): + Required. Name of the resource, in the form + ``projects/{project}/locations/{location}/sacAttachments/{sac_attachment}``. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class PartnerSSERealm(proto.Message): + r"""Message describing PartnerSSERealm object + + Attributes: + name (str): + name of resource + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp + labels (MutableMapping[str, str]): + Labels as key value pairs + pairing_key (str): + Required. value of the key to establish + global handshake from SSERealm + partner_vpc (str): + Optional. VPC owned by the partner to be peered with CDEN + sse_vpc in sse_project This field is deprecated. Use + partner_network instead. + sse_vpc (str): + Output only. CDEN owned VPC to be peered with partner_vpc + This field is deprecated. Use sse_network instead. + sse_project (str): + Output only. CDEN owned project owning sse_vpc. It stores + project id in the TTM flow, but project number in the NCCGW + flow. This field will be deprecated after the partner + migrates from using sse_project to using sse_project_number. + state (google.cloud.network_security_v1alpha1.types.PartnerSSERealm.State): + Output only. State of the realm. It can be either + CUSTOMER_ATTACHED or CUSTOMER_DETACHED. + partner_network (str): + Optional. Partner-owned network to be peered with CDEN's + sse_network in sse_project + sse_network (str): + Output only. CDEN-owned network to be peered with + partner_network + pan_options (google.cloud.network_security_v1alpha1.types.PartnerSSERealm.PartnerSSERealmPanOptions): + Optional. Required only for PAN. + sse_project_number (int): + Output only. CDEN owned project owning sse_vpc + """ + + class State(proto.Enum): + r"""State of the realm + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + CUSTOMER_ATTACHED (1): + This PartnerSSERealm is attached to a + customer realm. This is the default state when a + PartnerSSERealm is successfully created. 
+ CUSTOMER_DETACHED (2): + This PartnerSSERealm is no longer attached to + a customer realm. This is the state when the + customer realm is deleted. + """ + STATE_UNSPECIFIED = 0 + CUSTOMER_ATTACHED = 1 + CUSTOMER_DETACHED = 2 + + class PartnerSSERealmPanOptions(proto.Message): + r"""Fields specific to PAN realms. + + Attributes: + serial_number (str): + Optional. serial_number is provided by PAN to identify GCP + customer on PAN side. + tenant_id (str): + Optional. tenant_id is provided by PAN to identify GCP + customer on PAN side. + """ + + serial_number: str = proto.Field( + proto.STRING, + number=1, + ) + tenant_id: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + pairing_key: str = proto.Field( + proto.STRING, + number=5, + ) + partner_vpc: str = proto.Field( + proto.STRING, + number=6, + ) + sse_vpc: str = proto.Field( + proto.STRING, + number=7, + ) + sse_project: str = proto.Field( + proto.STRING, + number=8, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + partner_network: str = proto.Field( + proto.STRING, + number=10, + ) + sse_network: str = proto.Field( + proto.STRING, + number=11, + ) + pan_options: PartnerSSERealmPanOptions = proto.Field( + proto.MESSAGE, + number=12, + message=PartnerSSERealmPanOptions, + ) + sse_project_number: int = proto.Field( + proto.INT64, + number=13, + ) + + +class ListPartnerSSERealmsRequest(proto.Message): + r"""Message for requesting list of PartnerSSERealms + + Attributes: + parent (str): + Required. Parent value for + ListPartnerSSERealmsRequest + page_size (int): + Requested page size. Server may return fewer + items than requested. If unspecified, server + will pick an appropriate default. + page_token (str): + A token identifying a page of results the + server should return. + filter (str): + Filtering results + order_by (str): + Hint for how to order the results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListPartnerSSERealmsResponse(proto.Message): + r"""Message for response to listing PartnerSSERealms + + Attributes: + partner_sse_realms (MutableSequence[google.cloud.network_security_v1alpha1.types.PartnerSSERealm]): + The list of PartnerSSERealm + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + partner_sse_realms: MutableSequence["PartnerSSERealm"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PartnerSSERealm", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetPartnerSSERealmRequest(proto.Message): + r"""Message for getting a PartnerSSERealm + + Attributes: + name (str): + Required. 
Name of the resource.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class CreatePartnerSSERealmRequest(proto.Message):
+    r"""Message for creating a PartnerSSERealm
+
+    Attributes:
+        parent (str):
+            Required. Value for the parent.
+        partner_sse_realm_id (str):
+            Required. ID of the requesting object. If the ID is
+            auto-generated server-side, remove this field and
+            partner_sse_realm_id from the method_signature of the
+            Create RPC.
+        partner_sse_realm (google.cloud.network_security_v1alpha1.types.PartnerSSERealm):
+            Required. The resource being created.
+        request_id (str):
+            Optional. An optional request ID to identify
+            requests. Specify a unique request ID so that if
+            you must retry your request, the server will
+            know to ignore the request if it has already
+            been completed. The server will guarantee that
+            for at least 60 minutes after the first request.
+
+            For example, consider a situation where you make
+            an initial request and the request times out. If
+            you make the request again with the same request
+            ID, the server can check if the original operation
+            with the same request ID was received, and if
+            so, will ignore the second request. This
+            prevents clients from accidentally creating
+            duplicate commitments.
+
+            The request ID must be a valid UUID with the
+            exception that zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    partner_sse_realm_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    partner_sse_realm: "PartnerSSERealm" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="PartnerSSERealm",
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class DeletePartnerSSERealmRequest(proto.Message):
+    r"""Message for deleting a PartnerSSERealm
+
+    Attributes:
+        name (str):
+            Required. Name of the resource.
+        request_id (str):
+            Optional. An optional request ID to identify
+            requests. Specify a unique request ID so that if
+            you must retry your request, the server will
+            know to ignore the request if it has already
+            been completed. The server will guarantee that
+            for at least 60 minutes after the first request.
+
+            For example, consider a situation where you make
+            an initial request and the request times out. If
+            you make the request again with the same request
+            ID, the server can check if the original operation
+            with the same request ID was received, and if
+            so, will ignore the second request. This
+            prevents clients from accidentally creating
+            duplicate commitments.
+
+            The request ID must be a valid UUID with the
+            exception that zero UUID is not supported
+            (00000000-0000-0000-0000-000000000000).
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/tls_inspection_policy.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/tls_inspection_policy.py
new file mode 100644
index 000000000000..35c271e0a30d
--- /dev/null
+++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/tls_inspection_policy.py
@@ -0,0 +1,395 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.networksecurity.v1alpha1",
+    manifest={
+        "TlsInspectionPolicy",
+        "CreateTlsInspectionPolicyRequest",
+        "ListTlsInspectionPoliciesRequest",
+        "ListTlsInspectionPoliciesResponse",
+        "GetTlsInspectionPolicyRequest",
+        "DeleteTlsInspectionPolicyRequest",
+        "UpdateTlsInspectionPolicyRequest",
+    },
+)
+
+
+class TlsInspectionPolicy(proto.Message):
+    r"""The TlsInspectionPolicy resource contains references to CA
+    pools in Certificate Authority Service and associated metadata.
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Required. Name of the resource. Name is of the form
+            projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}.
+            tls_inspection_policy should match the pattern:
+            ``(^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$)``.
+        description (str):
+            Optional. Free-text description of the
+            resource.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp when the resource
+            was created.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp when the resource
+            was updated.
+        ca_pool (str):
+            Required. A CA pool resource used to issue interception
+            certificates. The CA pool string has a relative resource
+            path following the form
+            "projects/{project}/locations/{location}/caPools/{ca_pool}".
+        trust_config (str):
+            Optional. A TrustConfig resource used when making a
+            connection to the TLS server. This is a relative resource
+            path following the form
+            "projects/{project}/locations/{location}/trustConfigs/{trust_config}".
+            This is necessary to intercept TLS connections to servers
+            with certificates signed by a private CA or self-signed
+            certificates. Note that Secure Web Proxy does not yet honor
+            this field.
+        exclude_public_ca_set (bool):
+            Optional. If FALSE (the default), use our default set of
+            public CAs in addition to any CAs specified in trust_config.
+            These public CAs are currently based on the Mozilla Root
+            Program and are subject to change over time. If TRUE, do not
+            accept our default set of public CAs. Only CAs specified in
+            trust_config will be accepted. This defaults to FALSE (use
+            public CAs in addition to trust_config) for backwards
+            compatibility, but trusting public root CAs is *not
+            recommended* unless the traffic in question is outbound to
+            public web servers. When possible, prefer setting this to
+            "false" and explicitly specifying trusted CAs and
+            certificates in a TrustConfig. Note that Secure Web Proxy
+            does not yet honor this field.
+
+            This field is a member of `oneof`_ ``_exclude_public_ca_set``.
+        min_tls_version (google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy.TlsVersion):
+            Optional.
Minimum TLS version that the + firewall should use when negotiating connections + with both clients and servers. If this is not + set, then the default value is to allow the + broadest set of clients and servers (TLS 1.0 or + higher). Setting this to more restrictive values + may improve security, but may also prevent the + firewall from connecting to some clients or + servers. + Note that Secure Web Proxy does not yet honor + this field. + tls_feature_profile (google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy.Profile): + Optional. The selected Profile. If this is not set, then the + default value is to allow the broadest set of clients and + servers ("PROFILE_COMPATIBLE"). Setting this to more + restrictive values may improve security, but may also + prevent the TLS inspection proxy from connecting to some + clients or servers. Note that Secure Web Proxy does not yet + honor this field. + custom_tls_features (MutableSequence[str]): + Optional. List of custom TLS cipher suites selected. This + field is valid only if the selected tls_feature_profile is + CUSTOM. The + [compute.SslPoliciesService.ListAvailableFeatures][] method + returns the set of features that can be specified in this + list. Note that Secure Web Proxy does not yet honor this + field. + """ + + class TlsVersion(proto.Enum): + r"""The minimum version of TLS protocol that can be used by + clients or servers to establish a connection with the TLS + inspection proxy. + + Values: + TLS_VERSION_UNSPECIFIED (0): + Indicates no TLS version was specified. + TLS_1_0 (1): + TLS 1.0 + TLS_1_1 (2): + TLS 1.1 + TLS_1_2 (3): + TLS 1.2 + TLS_1_3 (4): + TLS 1.3 + """ + TLS_VERSION_UNSPECIFIED = 0 + TLS_1_0 = 1 + TLS_1_1 = 2 + TLS_1_2 = 3 + TLS_1_3 = 4 + + class Profile(proto.Enum): + r"""Profile specifies the set of TLS cipher suites (and possibly + other features in the future) that can be used by the firewall + when negotiating TLS connections with clients and servers. The + meaning of these fields is identical to the load balancers' + SSLPolicy resource. + + Values: + PROFILE_UNSPECIFIED (0): + Indicates no profile was specified. + PROFILE_COMPATIBLE (1): + Compatible profile. Allows the broadest set + of clients, even those which support only + out-of-date SSL features to negotiate with the + TLS inspection proxy. + PROFILE_MODERN (2): + Modern profile. Supports a wide set of SSL + features, allowing modern clients to negotiate + SSL with the TLS inspection proxy. + PROFILE_RESTRICTED (3): + Restricted profile. Supports a reduced set of + SSL features, intended to meet stricter + compliance requirements. + PROFILE_CUSTOM (4): + Custom profile. Allow only the set of allowed SSL features + specified in the custom_features field of SslPolicy. 
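+
+        Example (illustrative sketch; the resource paths below are
+        placeholders and the package-level re-export of the type is
+        assumed, as in this package's samples):
+
+        .. code-block:: python
+
+            from google.cloud import network_security_v1alpha1
+
+            # Restrict negotiation to the modern cipher-suite profile.
+            # Placeholder name/ca_pool paths; adjust for your project.
+            policy = network_security_v1alpha1.TlsInspectionPolicy(
+                name="projects/my-project/locations/us-central1/tlsInspectionPolicies/my-policy",
+                ca_pool="projects/my-project/locations/us-central1/caPools/my-ca-pool",
+                tls_feature_profile=network_security_v1alpha1.TlsInspectionPolicy.Profile.PROFILE_MODERN,
+            )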
+ """ + PROFILE_UNSPECIFIED = 0 + PROFILE_COMPATIBLE = 1 + PROFILE_MODERN = 2 + PROFILE_RESTRICTED = 3 + PROFILE_CUSTOM = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + ca_pool: str = proto.Field( + proto.STRING, + number=5, + ) + trust_config: str = proto.Field( + proto.STRING, + number=6, + ) + exclude_public_ca_set: bool = proto.Field( + proto.BOOL, + number=7, + optional=True, + ) + min_tls_version: TlsVersion = proto.Field( + proto.ENUM, + number=8, + enum=TlsVersion, + ) + tls_feature_profile: Profile = proto.Field( + proto.ENUM, + number=9, + enum=Profile, + ) + custom_tls_features: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + + +class CreateTlsInspectionPolicyRequest(proto.Message): + r"""Request used by the CreateTlsInspectionPolicy method. + + Attributes: + parent (str): + Required. The parent resource of the TlsInspectionPolicy. + Must be in the format + ``projects/{project}/locations/{location}``. + tls_inspection_policy_id (str): + Required. Short name of the TlsInspectionPolicy resource to + be created. This value should be 1-63 characters long, + containing only letters, numbers, hyphens, and underscores, + and should not start with a number. E.g. + "tls_inspection_policy1". + tls_inspection_policy (google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy): + Required. TlsInspectionPolicy resource to be + created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + tls_inspection_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + tls_inspection_policy: "TlsInspectionPolicy" = proto.Field( + proto.MESSAGE, + number=3, + message="TlsInspectionPolicy", + ) + + +class ListTlsInspectionPoliciesRequest(proto.Message): + r"""Request used with the ListTlsInspectionPolicies method. + + Attributes: + parent (str): + Required. The project and location from which the + TlsInspectionPolicies should be listed, specified in the + format ``projects/{project}/locations/{location}``. + page_size (int): + Maximum number of TlsInspectionPolicies to + return per call. + page_token (str): + The value returned by the last + 'ListTlsInspectionPoliciesResponse' Indicates + that this is a continuation of a prior + 'ListTlsInspectionPolicies' call, and that the + system should return the next page of data. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTlsInspectionPoliciesResponse(proto.Message): + r"""Response returned by the ListTlsInspectionPolicies method. + + Attributes: + tls_inspection_policies (MutableSequence[google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy]): + List of TlsInspectionPolicies resources. + next_page_token (str): + If there might be more results than those appearing in this + response, then 'next_page_token' is included. To get the + next set of results, call this method again using the value + of 'next_page_token' as 'page_token'. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
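+
+    Example (illustrative sketch of the page-token contract; the client
+    class and the ``list_tls_inspection_policies`` call are assumed, and
+    the parent path is a placeholder):
+
+    .. code-block:: python
+
+        from google.cloud import network_security_v1alpha1
+
+        client = network_security_v1alpha1.NetworkSecurityClient()
+        request = network_security_v1alpha1.ListTlsInspectionPoliciesRequest(
+            parent="projects/my-project/locations/us-central1",
+            page_size=50,
+        )
+        while True:
+            # Assumed RPC; check the generated client for the exact surface.
+            response = client.list_tls_inspection_policies(request=request)
+            for policy in response.tls_inspection_policies:
+                print(policy.name)
+            if not response.next_page_token:
+                break
+            request.page_token = response.next_page_token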
+ """ + + @property + def raw_page(self): + return self + + tls_inspection_policies: MutableSequence[ + "TlsInspectionPolicy" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="TlsInspectionPolicy", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetTlsInspectionPolicyRequest(proto.Message): + r"""Request used by the GetTlsInspectionPolicy method. + + Attributes: + name (str): + Required. A name of the TlsInspectionPolicy to get. Must be + in the format + ``projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteTlsInspectionPolicyRequest(proto.Message): + r"""Request used by the DeleteTlsInspectionPolicy method. + + Attributes: + name (str): + Required. A name of the TlsInspectionPolicy to delete. Must + be in the format + ``projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}``. + force (bool): + If set to true, any rules for this + TlsInspectionPolicy will also be deleted. + (Otherwise, the request will only work if the + TlsInspectionPolicy has no rules.) + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class UpdateTlsInspectionPolicyRequest(proto.Message): + r"""Request used by the UpdateTlsInspectionPolicy method. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the TlsInspectionPolicy resource by the + update. The fields specified in the update_mask are relative + to the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + tls_inspection_policy (google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy): + Required. Updated TlsInspectionPolicy + resource. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + tls_inspection_policy: "TlsInspectionPolicy" = proto.Field( + proto.MESSAGE, + number=2, + message="TlsInspectionPolicy", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/url_list.py b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/url_list.py new file mode 100644 index 000000000000..02bb87b89493 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1alpha1/types/url_list.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.networksecurity.v1alpha1",
+    manifest={
+        "UrlList",
+        "ListUrlListsRequest",
+        "ListUrlListsResponse",
+        "GetUrlListRequest",
+        "CreateUrlListRequest",
+        "UpdateUrlListRequest",
+        "DeleteUrlListRequest",
+    },
+)
+
+
+class UrlList(proto.Message):
+    r"""UrlList proto helps users to set reusable, independently
+    manageable lists of hosts, host patterns, URLs, and URL patterns.
+
+    Attributes:
+        name (str):
+            Required. Name of the resource provided by the user. Name
+            is of the form
+            projects/{project}/locations/{location}/urlLists/{url_list}.
+            url_list should match the pattern:
+            ``(^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$)``.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when the UrlList was
+            created.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when the UrlList was
+            updated.
+        description (str):
+            Optional. Free-text description of the
+            resource.
+        values (MutableSequence[str]):
+            Required. FQDNs and URLs.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    values: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListUrlListsRequest(proto.Message):
+    r"""Request used by the ListUrlLists method.
+
+    Attributes:
+        parent (str):
+            Required. The project and location from which the UrlLists
+            should be listed, specified in the format
+            ``projects/{project}/locations/{location}``.
+        page_size (int):
+            Maximum number of UrlLists to return per
+            call.
+        page_token (str):
+            The value returned by the last ``ListUrlListsResponse``.
+            Indicates that this is a continuation of a prior
+            ``ListUrlLists`` call, and that the system should return the
+            next page of data.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListUrlListsResponse(proto.Message):
+    r"""Response returned by the ListUrlLists method.
+
+    Attributes:
+        url_lists (MutableSequence[google.cloud.network_security_v1alpha1.types.UrlList]):
+            List of UrlList resources.
+        next_page_token (str):
+            If there might be more results than those appearing in this
+            response, then ``next_page_token`` is included. To get the
+            next set of results, call this method again using the value
+            of ``next_page_token`` as ``page_token``.
+        unreachable (MutableSequence[str]):
+            Locations that could not be reached.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    url_lists: MutableSequence["UrlList"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="UrlList",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class GetUrlListRequest(proto.Message):
+    r"""Request used by the GetUrlList method.
+
+    Attributes:
+        name (str):
+            Required.
A name of the UrlList to get. Must be in the + format ``projects/*/locations/{location}/urlLists/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateUrlListRequest(proto.Message): + r"""Request used by the CreateUrlList method. + + Attributes: + parent (str): + Required. The parent resource of the UrlList. Must be in the + format ``projects/*/locations/{location}``. + url_list_id (str): + Required. Short name of the UrlList resource to be created. + This value should be 1-63 characters long, containing only + letters, numbers, hyphens, and underscores, and should not + start with a number. E.g. "url_list". + url_list (google.cloud.network_security_v1alpha1.types.UrlList): + Required. UrlList resource to be created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + url_list_id: str = proto.Field( + proto.STRING, + number=2, + ) + url_list: "UrlList" = proto.Field( + proto.MESSAGE, + number=3, + message="UrlList", + ) + + +class UpdateUrlListRequest(proto.Message): + r"""Request used by UpdateUrlList method. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask is used to specify the fields to be + overwritten in the UrlList resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + url_list (google.cloud.network_security_v1alpha1.types.UrlList): + Required. Updated UrlList resource. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + url_list: "UrlList" = proto.Field( + proto.MESSAGE, + number=2, + message="UrlList", + ) + + +class DeleteUrlListRequest(proto.Message): + r"""Request used by the DeleteUrlList method. + + Attributes: + name (str): + Required. A name of the UrlList to delete. Must be in the + format ``projects/*/locations/{location}/urlLists/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py new file mode 100644 index 000000000000..3d975fef9064 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1alpha1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1alpha1.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = await client.create_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py new file mode 100644 index 000000000000..b2f5533a719b --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1alpha1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1alpha1.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = client.create_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py new file mode 100644 index 000000000000..ec74634a4679 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + await client.delete_dns_threat_detector(request=request) + + +# [END networksecurity_v1alpha1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py new file mode 100644 index 000000000000..51ef19571220 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + client.delete_dns_threat_detector(request=request) + + +# [END networksecurity_v1alpha1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py new file mode 100644 index 000000000000..af66bb537a54 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_DnsThreatDetectorService_GetDnsThreatDetector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_DnsThreatDetectorService_GetDnsThreatDetector_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py new file mode 100644 index 000000000000..be26e4962f85 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_DnsThreatDetectorService_GetDnsThreatDetector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + response = client.get_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_DnsThreatDetectorService_GetDnsThreatDetector_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py new file mode 100644 index 000000000000..04fc3e95216b --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDnsThreatDetectors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_dns_threat_detectors(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListDnsThreatDetectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dns_threat_detectors(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py new file mode 100644 index 000000000000..8b4a4d0b570a --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDnsThreatDetectors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_dns_threat_detectors(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListDnsThreatDetectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dns_threat_detectors(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py new file mode 100644 index 000000000000..adef2a1b3a4b --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1alpha1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1alpha1.UpdateDnsThreatDetectorRequest( + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = await client.update_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py new file mode 100644 index 000000000000..288e4a2d1b5d --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_dns_threat_detector(): + # Create a client + client = network_security_v1alpha1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1alpha1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1alpha1.UpdateDnsThreatDetectorRequest( + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = client.update_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_async.py new file mode 100644 index 000000000000..034ecf9d5951 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateFirewallEndpointAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpointAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + firewall_endpoint_association = ( + network_security_v1alpha1.FirewallEndpointAssociation() + ) + firewall_endpoint_association.network = "network_value" + firewall_endpoint_association.firewall_endpoint = "firewall_endpoint_value" + + request = network_security_v1alpha1.CreateFirewallEndpointAssociationRequest( + parent="parent_value", + firewall_endpoint_association=firewall_endpoint_association, + ) + + # Make the request + operation = client.create_firewall_endpoint_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpointAssociation_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_sync.py new file mode 100644 index 000000000000..e3a3704a8cbf --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateFirewallEndpointAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpointAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + firewall_endpoint_association = ( + network_security_v1alpha1.FirewallEndpointAssociation() + ) + firewall_endpoint_association.network = "network_value" + firewall_endpoint_association.firewall_endpoint = "firewall_endpoint_value" + + request = network_security_v1alpha1.CreateFirewallEndpointAssociationRequest( + parent="parent_value", + firewall_endpoint_association=firewall_endpoint_association, + ) + + # Make the request + operation = client.create_firewall_endpoint_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpointAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_async.py new file mode 100644 index 000000000000..e4cbd4524b66 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateFirewallEndpoint +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpoint_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_create_firewall_endpoint():
+    # Create a client
+    client = network_security_v1alpha1.FirewallActivationAsyncClient()
+
+    # Initialize request argument(s)
+    firewall_endpoint = network_security_v1alpha1.FirewallEndpoint()
+    firewall_endpoint.billing_project_id = "billing_project_id_value"
+
+    request = network_security_v1alpha1.CreateFirewallEndpointRequest(
+        parent="parent_value",
+        firewall_endpoint_id="firewall_endpoint_id_value",
+        firewall_endpoint=firewall_endpoint,
+    )
+
+    # Make the request
+    operation = await client.create_firewall_endpoint(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+
+# [END networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpoint_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_sync.py
new file mode 100644
index 000000000000..2ee2862476ce
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateFirewallEndpoint
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpoint_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_firewall_endpoint(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + firewall_endpoint = network_security_v1alpha1.FirewallEndpoint() + firewall_endpoint.billing_project_id = "billing_project_id_value" + + request = network_security_v1alpha1.CreateFirewallEndpointRequest( + parent="parent_value", + firewall_endpoint_id="firewall_endpoint_id_value", + firewall_endpoint=firewall_endpoint, + ) + + # Make the request + operation = client.create_firewall_endpoint(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpoint_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_async.py new file mode 100644 index 000000000000..a4c00c37001e --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteFirewallEndpointAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpointAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_delete_firewall_endpoint_association():
+    # Create a client
+    client = network_security_v1alpha1.FirewallActivationAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_security_v1alpha1.DeleteFirewallEndpointAssociationRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_firewall_endpoint_association(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+
+# [END networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpointAssociation_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_sync.py
new file mode 100644
index 000000000000..8629348e23d1
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteFirewallEndpointAssociation
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpointAssociation_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteFirewallEndpointAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_firewall_endpoint_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpointAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_async.py new file mode 100644 index 000000000000..f18035b276ca --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteFirewallEndpoint +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpoint_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_delete_firewall_endpoint():
+    # Create a client
+    client = network_security_v1alpha1.FirewallActivationAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_security_v1alpha1.DeleteFirewallEndpointRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = await client.delete_firewall_endpoint(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+
+# [END networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpoint_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_sync.py
new file mode 100644
index 000000000000..986a85691a7f
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteFirewallEndpoint
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpoint_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_firewall_endpoint(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteFirewallEndpointRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_firewall_endpoint(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpoint_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_async.py new file mode 100644 index 000000000000..017dc4c4cc0c --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFirewallEndpointAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpointAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetFirewallEndpointAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_firewall_endpoint_association(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpointAssociation_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_sync.py new file mode 100644 index 000000000000..e3d8c3aefc6c --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFirewallEndpointAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpointAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetFirewallEndpointAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_firewall_endpoint_association(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpointAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_async.py new file mode 100644 index 000000000000..2ba5bf57cfc3 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFirewallEndpoint +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpoint_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_firewall_endpoint(): + # Create a client + client = network_security_v1alpha1.FirewallActivationAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetFirewallEndpointRequest( + name="name_value", + ) + + # Make the request + response = await client.get_firewall_endpoint(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpoint_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_sync.py new file mode 100644 index 000000000000..f7abe8ca5a33 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFirewallEndpoint +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpoint_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_firewall_endpoint(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetFirewallEndpointRequest( + name="name_value", + ) + + # Make the request + response = client.get_firewall_endpoint(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpoint_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_async.py new file mode 100644 index 000000000000..c8140b051384 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFirewallEndpointAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpointAssociations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_list_firewall_endpoint_associations():
+    # Create a client
+    client = network_security_v1alpha1.FirewallActivationAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_security_v1alpha1.ListFirewallEndpointAssociationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_firewall_endpoint_associations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+
+# [END networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpointAssociations_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_sync.py
new file mode 100644
index 000000000000..71138861f761
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_sync.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListFirewallEndpointAssociations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpointAssociations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_firewall_endpoint_associations(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListFirewallEndpointAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_firewall_endpoint_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpointAssociations_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_async.py new file mode 100644 index 000000000000..5593877a5b8a --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFirewallEndpoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpoints_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_list_firewall_endpoints():
+    # Create a client
+    client = network_security_v1alpha1.FirewallActivationAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_security_v1alpha1.ListFirewallEndpointsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_firewall_endpoints(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+
+# [END networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpoints_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_sync.py
new file mode 100644
index 000000000000..03e7d0fb49cf
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_sync.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListFirewallEndpoints
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpoints_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_firewall_endpoints(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListFirewallEndpointsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_firewall_endpoints(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpoints_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_async.py new file mode 100644 index 000000000000..9a76e5dbfeaf --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateFirewallEndpointAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpointAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_update_firewall_endpoint_association():
+    # Create a client
+    client = network_security_v1alpha1.FirewallActivationAsyncClient()
+
+    # Initialize request argument(s)
+    firewall_endpoint_association = (
+        network_security_v1alpha1.FirewallEndpointAssociation()
+    )
+    firewall_endpoint_association.network = "network_value"
+    firewall_endpoint_association.firewall_endpoint = "firewall_endpoint_value"
+
+    request = network_security_v1alpha1.UpdateFirewallEndpointAssociationRequest(
+        firewall_endpoint_association=firewall_endpoint_association,
+    )
+
+    # Make the request
+    operation = await client.update_firewall_endpoint_association(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+
+# [END networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpointAssociation_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_sync.py
new file mode 100644
index 000000000000..aa548d134431
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_sync.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateFirewallEndpointAssociation
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpointAssociation_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_firewall_endpoint_association(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + firewall_endpoint_association = ( + network_security_v1alpha1.FirewallEndpointAssociation() + ) + firewall_endpoint_association.network = "network_value" + firewall_endpoint_association.firewall_endpoint = "firewall_endpoint_value" + + request = network_security_v1alpha1.UpdateFirewallEndpointAssociationRequest( + firewall_endpoint_association=firewall_endpoint_association, + ) + + # Make the request + operation = client.update_firewall_endpoint_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpointAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_async.py new file mode 100644 index 000000000000..e7d3aad4bd3e --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateFirewallEndpoint +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpoint_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_update_firewall_endpoint():
+    # Create a client
+    client = network_security_v1alpha1.FirewallActivationAsyncClient()
+
+    # Initialize request argument(s)
+    firewall_endpoint = network_security_v1alpha1.FirewallEndpoint()
+    firewall_endpoint.billing_project_id = "billing_project_id_value"
+
+    request = network_security_v1alpha1.UpdateFirewallEndpointRequest(
+        firewall_endpoint=firewall_endpoint,
+    )
+
+    # Make the request
+    operation = await client.update_firewall_endpoint(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+
+# [END networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpoint_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_sync.py
new file mode 100644
index 000000000000..91f1cef31a78
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_sync.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateFirewallEndpoint
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpoint_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_firewall_endpoint(): + # Create a client + client = network_security_v1alpha1.FirewallActivationClient() + + # Initialize request argument(s) + firewall_endpoint = network_security_v1alpha1.FirewallEndpoint() + firewall_endpoint.billing_project_id = "billing_project_id_value" + + request = network_security_v1alpha1.UpdateFirewallEndpointRequest( + firewall_endpoint=firewall_endpoint, + ) + + # Make the request + operation = client.update_firewall_endpoint(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpoint_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_async.py new file mode 100644 index 000000000000..0be15f1ff2fe --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInterceptDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_create_intercept_deployment():
+    # Create a client
+    client = network_security_v1alpha1.InterceptAsyncClient()
+
+    # Initialize request argument(s)
+    intercept_deployment = network_security_v1alpha1.InterceptDeployment()
+    intercept_deployment.forwarding_rule = "forwarding_rule_value"
+    intercept_deployment.intercept_deployment_group = "intercept_deployment_group_value"
+
+    request = network_security_v1alpha1.CreateInterceptDeploymentRequest(
+        parent="parent_value",
+        intercept_deployment_id="intercept_deployment_id_value",
+        intercept_deployment=intercept_deployment,
+    )
+
+    # Make the request
+    operation = await client.create_intercept_deployment(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+
+# [END networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeployment_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_async.py
new file mode 100644
index 000000000000..c87301bf43a8
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_async.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateInterceptDeploymentGroup
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeploymentGroup_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_create_intercept_deployment_group():
+    # Create a client
+    client = network_security_v1alpha1.InterceptAsyncClient()
+
+    # Initialize request argument(s)
+    intercept_deployment_group = network_security_v1alpha1.InterceptDeploymentGroup()
+    intercept_deployment_group.network = "network_value"
+
+    request = network_security_v1alpha1.CreateInterceptDeploymentGroupRequest(
+        parent="parent_value",
+        intercept_deployment_group_id="intercept_deployment_group_id_value",
+        intercept_deployment_group=intercept_deployment_group,
+    )
+
+    # Make the request
+    operation = await client.create_intercept_deployment_group(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+
+# [END networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeploymentGroup_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_sync.py
new file mode 100644
index 000000000000..d60a43e922b5
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateInterceptDeploymentGroup
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeploymentGroup_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_deployment_group = network_security_v1alpha1.InterceptDeploymentGroup() + intercept_deployment_group.network = "network_value" + + request = network_security_v1alpha1.CreateInterceptDeploymentGroupRequest( + parent="parent_value", + intercept_deployment_group_id="intercept_deployment_group_id_value", + intercept_deployment_group=intercept_deployment_group, + ) + + # Make the request + operation = client.create_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeploymentGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_sync.py new file mode 100644 index 000000000000..d95c13729fe5 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInterceptDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_deployment = network_security_v1alpha1.InterceptDeployment() + intercept_deployment.forwarding_rule = "forwarding_rule_value" + intercept_deployment.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.CreateInterceptDeploymentRequest( + parent="parent_value", + intercept_deployment_id="intercept_deployment_id_value", + intercept_deployment=intercept_deployment, + ) + + # Make the request + operation = client.create_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeployment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_async.py new file mode 100644 index 000000000000..d149c2b519f2 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_async.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInterceptEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroupAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_endpoint_group_association = ( + network_security_v1alpha1.InterceptEndpointGroupAssociation() + ) + intercept_endpoint_group_association.intercept_endpoint_group = ( + "intercept_endpoint_group_value" + ) + intercept_endpoint_group_association.network = "network_value" + + request = network_security_v1alpha1.CreateInterceptEndpointGroupAssociationRequest( + parent="parent_value", + intercept_endpoint_group_association=intercept_endpoint_group_association, + ) + + # Make the request + operation = client.create_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroupAssociation_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_sync.py new file mode 100644 index 000000000000..70771f0a76f8 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_sync.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInterceptEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroupAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_endpoint_group_association = ( + network_security_v1alpha1.InterceptEndpointGroupAssociation() + ) + intercept_endpoint_group_association.intercept_endpoint_group = ( + "intercept_endpoint_group_value" + ) + intercept_endpoint_group_association.network = "network_value" + + request = network_security_v1alpha1.CreateInterceptEndpointGroupAssociationRequest( + parent="parent_value", + intercept_endpoint_group_association=intercept_endpoint_group_association, + ) + + # Make the request + operation = client.create_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroupAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_async.py new file mode 100644 index 000000000000..947ee5cb791f --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInterceptEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_endpoint_group = network_security_v1alpha1.InterceptEndpointGroup() + intercept_endpoint_group.intercept_deployment_group = ( + "intercept_deployment_group_value" + ) + + request = network_security_v1alpha1.CreateInterceptEndpointGroupRequest( + parent="parent_value", + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + intercept_endpoint_group=intercept_endpoint_group, + ) + + # Make the request + operation = client.create_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_sync.py new file mode 100644 index 000000000000..9b043f1b6ac8 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInterceptEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_endpoint_group = network_security_v1alpha1.InterceptEndpointGroup() + intercept_endpoint_group.intercept_deployment_group = ( + "intercept_deployment_group_value" + ) + + request = network_security_v1alpha1.CreateInterceptEndpointGroupRequest( + parent="parent_value", + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + intercept_endpoint_group=intercept_endpoint_group, + ) + + # Make the request + operation = client.create_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_async.py new file mode 100644 index 000000000000..104f14e9f218 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInterceptDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeployment_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_async.py new file mode 100644 index 000000000000..091c1d76862d --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInterceptDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeploymentGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_sync.py new file mode 100644 index 000000000000..09d996ebaf78 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInterceptDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeploymentGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_sync.py new file mode 100644 index 000000000000..8dd187c72345 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInterceptDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeployment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_async.py new file mode 100644 index 000000000000..4cc1d8710a9b --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInterceptEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroupAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroupAssociation_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_sync.py new file mode 100644 index 000000000000..f7cc01e2501a --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInterceptEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroupAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroupAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_async.py new file mode 100644 index 000000000000..71092b57de35 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInterceptEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptEndpointGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_sync.py new file mode 100644 index 000000000000..b4ddbdb5732e --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInterceptEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteInterceptEndpointGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_async.py new file mode 100644 index 000000000000..434d3ce8c5e2 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInterceptDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_GetInterceptDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intercept_deployment(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_GetInterceptDeployment_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_async.py new file mode 100644 index 000000000000..0d3fb116ddb0 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInterceptDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_GetInterceptDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intercept_deployment_group(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_GetInterceptDeploymentGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_sync.py new file mode 100644 index 000000000000..768be85664e6 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInterceptDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_GetInterceptDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_intercept_deployment_group(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_GetInterceptDeploymentGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_sync.py new file mode 100644 index 000000000000..5f38d16fad4f --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInterceptDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_GetInterceptDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_intercept_deployment(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_GetInterceptDeployment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_async.py new file mode 100644 index 000000000000..1888eaf164e8 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInterceptEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroupAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intercept_endpoint_group_association(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroupAssociation_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_sync.py new file mode 100644 index 000000000000..91c923677d30 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInterceptEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroupAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_intercept_endpoint_group_association(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroupAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_async.py new file mode 100644 index 000000000000..2aff1e216363 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInterceptEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptEndpointGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intercept_endpoint_group(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_sync.py new file mode 100644 index 000000000000..2c6afdfab687 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInterceptEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetInterceptEndpointGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_intercept_endpoint_group(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_async.py new file mode 100644 index 000000000000..74f35556c2dd --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInterceptDeploymentGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_ListInterceptDeploymentGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_intercept_deployment_groups(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_deployment_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_ListInterceptDeploymentGroups_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_sync.py new file mode 100644 index 000000000000..a5ea22b883dd --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInterceptDeploymentGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_ListInterceptDeploymentGroups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_intercept_deployment_groups(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_deployment_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_ListInterceptDeploymentGroups_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_async.py new file mode 100644 index 000000000000..440d2bf8d401 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInterceptDeployments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_ListInterceptDeployments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_intercept_deployments(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_deployments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_ListInterceptDeployments_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_sync.py new file mode 100644 index 000000000000..61bf0871c665 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInterceptDeployments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_ListInterceptDeployments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_intercept_deployments(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_deployments(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_ListInterceptDeployments_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_async.py new file mode 100644 index 000000000000..baf8109d22b8 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInterceptEndpointGroupAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroupAssociations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_intercept_endpoint_group_associations(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptEndpointGroupAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_endpoint_group_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroupAssociations_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_sync.py new file mode 100644 index 000000000000..7bc9563768ca --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInterceptEndpointGroupAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroupAssociations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_intercept_endpoint_group_associations(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptEndpointGroupAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_endpoint_group_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroupAssociations_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_async.py new file mode 100644 index 000000000000..c0822ac40b5f --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInterceptEndpointGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_intercept_endpoint_groups(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptEndpointGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_endpoint_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroups_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_sync.py new file mode 100644 index 000000000000..6414453b91a5 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInterceptEndpointGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_intercept_endpoint_groups(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListInterceptEndpointGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intercept_endpoint_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroups_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_async.py new file mode 100644 index 000000000000..f939eb189bb5 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInterceptDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_deployment = network_security_v1alpha1.InterceptDeployment() + intercept_deployment.forwarding_rule = "forwarding_rule_value" + intercept_deployment.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.UpdateInterceptDeploymentRequest( + intercept_deployment=intercept_deployment, + ) + + # Make the request + operation = client.update_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeployment_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_async.py new file mode 100644 index 000000000000..35a1b03e0bbf --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInterceptDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_deployment_group = network_security_v1alpha1.InterceptDeploymentGroup() + intercept_deployment_group.network = "network_value" + + request = network_security_v1alpha1.UpdateInterceptDeploymentGroupRequest( + intercept_deployment_group=intercept_deployment_group, + ) + + # Make the request + operation = client.update_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeploymentGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_sync.py new file mode 100644 index 000000000000..46ec393aca35 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInterceptDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_intercept_deployment_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_deployment_group = network_security_v1alpha1.InterceptDeploymentGroup() + intercept_deployment_group.network = "network_value" + + request = network_security_v1alpha1.UpdateInterceptDeploymentGroupRequest( + intercept_deployment_group=intercept_deployment_group, + ) + + # Make the request + operation = client.update_intercept_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeploymentGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_sync.py new file mode 100644 index 000000000000..6bd9965bef8c --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInterceptDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_intercept_deployment(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_deployment = network_security_v1alpha1.InterceptDeployment() + intercept_deployment.forwarding_rule = "forwarding_rule_value" + intercept_deployment.intercept_deployment_group = "intercept_deployment_group_value" + + request = network_security_v1alpha1.UpdateInterceptDeploymentRequest( + intercept_deployment=intercept_deployment, + ) + + # Make the request + operation = client.update_intercept_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeployment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_async.py new file mode 100644 index 000000000000..7f86efc6e7f6 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_async.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInterceptEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroupAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_endpoint_group_association = ( + network_security_v1alpha1.InterceptEndpointGroupAssociation() + ) + intercept_endpoint_group_association.intercept_endpoint_group = ( + "intercept_endpoint_group_value" + ) + intercept_endpoint_group_association.network = "network_value" + + request = network_security_v1alpha1.UpdateInterceptEndpointGroupAssociationRequest( + intercept_endpoint_group_association=intercept_endpoint_group_association, + ) + + # Make the request + operation = client.update_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroupAssociation_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_sync.py new file mode 100644 index 000000000000..cf003720cdd8 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_sync.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInterceptEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroupAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_intercept_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_endpoint_group_association = ( + network_security_v1alpha1.InterceptEndpointGroupAssociation() + ) + intercept_endpoint_group_association.intercept_endpoint_group = ( + "intercept_endpoint_group_value" + ) + intercept_endpoint_group_association.network = "network_value" + + request = network_security_v1alpha1.UpdateInterceptEndpointGroupAssociationRequest( + intercept_endpoint_group_association=intercept_endpoint_group_association, + ) + + # Make the request + operation = client.update_intercept_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroupAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_async.py new file mode 100644 index 000000000000..bd201699329d --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInterceptEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptAsyncClient() + + # Initialize request argument(s) + intercept_endpoint_group = network_security_v1alpha1.InterceptEndpointGroup() + intercept_endpoint_group.intercept_deployment_group = ( + "intercept_deployment_group_value" + ) + + request = network_security_v1alpha1.UpdateInterceptEndpointGroupRequest( + intercept_endpoint_group=intercept_endpoint_group, + ) + + # Make the request + operation = client.update_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_sync.py new file mode 100644 index 000000000000..2e894eed702e --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInterceptEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_intercept_endpoint_group(): + # Create a client + client = network_security_v1alpha1.InterceptClient() + + # Initialize request argument(s) + intercept_endpoint_group = network_security_v1alpha1.InterceptEndpointGroup() + intercept_endpoint_group.intercept_deployment_group = ( + "intercept_deployment_group_value" + ) + + request = network_security_v1alpha1.UpdateInterceptEndpointGroupRequest( + intercept_endpoint_group=intercept_endpoint_group, + ) + + # Make the request + operation = client.update_intercept_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_async.py new file mode 100644 index 000000000000..bd52bc992c22 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMirroringDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + mirroring_deployment = network_security_v1alpha1.MirroringDeployment() + mirroring_deployment.forwarding_rule = "forwarding_rule_value" + mirroring_deployment.mirroring_deployment_group = "mirroring_deployment_group_value" + + request = network_security_v1alpha1.CreateMirroringDeploymentRequest( + parent="parent_value", + mirroring_deployment_id="mirroring_deployment_id_value", + mirroring_deployment=mirroring_deployment, + ) + + # Make the request + operation = client.create_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeployment_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_async.py new file mode 100644 index 000000000000..b096ce792b3c --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMirroringDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + mirroring_deployment_group = network_security_v1alpha1.MirroringDeploymentGroup() + mirroring_deployment_group.network = "network_value" + + request = network_security_v1alpha1.CreateMirroringDeploymentGroupRequest( + parent="parent_value", + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + mirroring_deployment_group=mirroring_deployment_group, + ) + + # Make the request + operation = client.create_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeploymentGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_sync.py new file mode 100644 index 000000000000..b02053ff4850 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMirroringDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + mirroring_deployment_group = network_security_v1alpha1.MirroringDeploymentGroup() + mirroring_deployment_group.network = "network_value" + + request = network_security_v1alpha1.CreateMirroringDeploymentGroupRequest( + parent="parent_value", + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + mirroring_deployment_group=mirroring_deployment_group, + ) + + # Make the request + operation = client.create_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeploymentGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_sync.py new file mode 100644 index 000000000000..fc6ccc4ca3d7 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMirroringDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + mirroring_deployment = network_security_v1alpha1.MirroringDeployment() + mirroring_deployment.forwarding_rule = "forwarding_rule_value" + mirroring_deployment.mirroring_deployment_group = "mirroring_deployment_group_value" + + request = network_security_v1alpha1.CreateMirroringDeploymentRequest( + parent="parent_value", + mirroring_deployment_id="mirroring_deployment_id_value", + mirroring_deployment=mirroring_deployment, + ) + + # Make the request + operation = client.create_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeployment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_async.py new file mode 100644 index 000000000000..e6b7923c8fda --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMirroringEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroupAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateMirroringEndpointGroupAssociationRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroupAssociation_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_sync.py new file mode 100644 index 000000000000..1044404864ad --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMirroringEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroupAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateMirroringEndpointGroupAssociationRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroupAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_async.py new file mode 100644 index 000000000000..8e858203746e --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMirroringEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateMirroringEndpointGroupRequest( + parent="parent_value", + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + # Make the request + operation = client.create_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_sync.py new file mode 100644 index 000000000000..a5a38a2ca162 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMirroringEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateMirroringEndpointGroupRequest( + parent="parent_value", + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + # Make the request + operation = client.create_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_async.py new file mode 100644 index 000000000000..4b5ae2bb59a7 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMirroringDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeployment_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_async.py new file mode 100644 index 000000000000..400d59916345 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMirroringDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeploymentGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_sync.py new file mode 100644 index 000000000000..5675f9857e1b --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMirroringDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeploymentGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_sync.py new file mode 100644 index 000000000000..2f1cd0daa366 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMirroringDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeployment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_async.py new file mode 100644 index 000000000000..83b0d5ddce5b --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMirroringEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroupAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroupAssociation_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_sync.py new file mode 100644 index 000000000000..b338ded2cade --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMirroringEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroupAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroupAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_async.py new file mode 100644 index 000000000000..6567bc90bf90 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMirroringEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringEndpointGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_sync.py new file mode 100644 index 000000000000..8a33956e3617 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMirroringEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteMirroringEndpointGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_async.py new file mode 100644 index 000000000000..418437b0be79 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMirroringDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mirroring_deployment(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeployment_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_async.py new file mode 100644 index 000000000000..7a39d5d63fd6 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMirroringDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mirroring_deployment_group(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeploymentGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_sync.py new file mode 100644 index 000000000000..b0fc27a6f393 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMirroringDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_mirroring_deployment_group(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeploymentGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_sync.py new file mode 100644 index 000000000000..3a2631a0373a --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMirroringDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_mirroring_deployment(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeployment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_async.py new file mode 100644 index 000000000000..0847f0d8ba42 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMirroringEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroupAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mirroring_endpoint_group_association(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroupAssociation_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_sync.py new file mode 100644 index 000000000000..cfd77b97f17d --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMirroringEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroupAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_mirroring_endpoint_group_association(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroupAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_async.py new file mode 100644 index 000000000000..3f441ae649af --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMirroringEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringEndpointGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mirroring_endpoint_group(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_sync.py new file mode 100644 index 000000000000..1dfd931f75a7 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMirroringEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetMirroringEndpointGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_mirroring_endpoint_group(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_async.py new file mode 100644 index 000000000000..61a00ce6a44c --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMirroringDeploymentGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeploymentGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_mirroring_deployment_groups(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_deployment_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeploymentGroups_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_sync.py new file mode 100644 index 000000000000..d40ababdaf56 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMirroringDeploymentGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeploymentGroups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_mirroring_deployment_groups(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_deployment_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeploymentGroups_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_async.py new file mode 100644 index 000000000000..36257e075e92 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMirroringDeployments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeployments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_list_mirroring_deployments():
+    # Create a client
+    client = network_security_v1alpha1.MirroringAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_security_v1alpha1.ListMirroringDeploymentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_mirroring_deployments(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+
+# [END networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeployments_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_sync.py
new file mode 100644
index 000000000000..6097f01b21a5
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_sync.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListMirroringDeployments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeployments_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_mirroring_deployments(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_deployments(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeployments_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_async.py new file mode 100644 index 000000000000..077df8724137 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMirroringEndpointGroupAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroupAssociations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_list_mirroring_endpoint_group_associations():
+    # Create a client
+    client = network_security_v1alpha1.MirroringAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_security_v1alpha1.ListMirroringEndpointGroupAssociationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_mirroring_endpoint_group_associations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+
+# [END networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroupAssociations_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_sync.py
new file mode 100644
index 000000000000..77a4f5658334
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_sync.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListMirroringEndpointGroupAssociations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroupAssociations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
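+#   Note that the returned pager fetches additional pages lazily as you iterate;
+#   if needed, the request's optional page_size and page_token fields can be set
+#   to control paging explicitly (standard paginated-list behaviour).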
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_mirroring_endpoint_group_associations(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringEndpointGroupAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_endpoint_group_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroupAssociations_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_async.py new file mode 100644 index 000000000000..abd44d8214fb --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMirroringEndpointGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
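+#   Because this variant uses the async client, the coroutine below must be run
+#   inside an event loop, e.g. asyncio.run(sample_list_mirroring_endpoint_groups()).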
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_security_v1alpha1
+
+
+async def sample_list_mirroring_endpoint_groups():
+    # Create a client
+    client = network_security_v1alpha1.MirroringAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_security_v1alpha1.ListMirroringEndpointGroupsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_mirroring_endpoint_groups(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+
+# [END networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroups_async]
diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_sync.py
new file mode 100644
index 000000000000..cd289b71d21b
--- /dev/null
+++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_sync.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListMirroringEndpointGroups
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-security
+
+
+# [START networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroups_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_mirroring_endpoint_groups(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListMirroringEndpointGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_mirroring_endpoint_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroups_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_async.py new file mode 100644 index 000000000000..3114723a0019 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMirroringDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
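+#   Note that in the async long-running-operation samples, awaiting the client
+#   call yields an AsyncOperation whose result() is itself a coroutine; depending
+#   on your google-api-core version you may need an additional await, e.g.
+#   response = await (await operation).result().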
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + mirroring_deployment = network_security_v1alpha1.MirroringDeployment() + mirroring_deployment.forwarding_rule = "forwarding_rule_value" + mirroring_deployment.mirroring_deployment_group = "mirroring_deployment_group_value" + + request = network_security_v1alpha1.UpdateMirroringDeploymentRequest( + mirroring_deployment=mirroring_deployment, + ) + + # Make the request + operation = client.update_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeployment_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_async.py new file mode 100644 index 000000000000..00ee568764c9 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMirroringDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + mirroring_deployment_group = network_security_v1alpha1.MirroringDeploymentGroup() + mirroring_deployment_group.network = "network_value" + + request = network_security_v1alpha1.UpdateMirroringDeploymentGroupRequest( + mirroring_deployment_group=mirroring_deployment_group, + ) + + # Make the request + operation = client.update_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeploymentGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_sync.py new file mode 100644 index 000000000000..e7c2bee5ddb8 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMirroringDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_mirroring_deployment_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + mirroring_deployment_group = network_security_v1alpha1.MirroringDeploymentGroup() + mirroring_deployment_group.network = "network_value" + + request = network_security_v1alpha1.UpdateMirroringDeploymentGroupRequest( + mirroring_deployment_group=mirroring_deployment_group, + ) + + # Make the request + operation = client.update_mirroring_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeploymentGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_sync.py new file mode 100644 index 000000000000..8bd398abbeb2 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMirroringDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
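+#   In the synchronous variant, operation.result() blocks until the long-running
+#   operation completes and raises if it fails; a timeout in seconds may be
+#   passed, e.g. operation.result(timeout=300).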
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_mirroring_deployment(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + mirroring_deployment = network_security_v1alpha1.MirroringDeployment() + mirroring_deployment.forwarding_rule = "forwarding_rule_value" + mirroring_deployment.mirroring_deployment_group = "mirroring_deployment_group_value" + + request = network_security_v1alpha1.UpdateMirroringDeploymentRequest( + mirroring_deployment=mirroring_deployment, + ) + + # Make the request + operation = client.update_mirroring_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeployment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_async.py new file mode 100644 index 000000000000..77850f822019 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMirroringEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroupAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
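+#   The empty request shown below only illustrates the call shape; a real update
+#   would typically set the resource being updated (including its name) and,
+#   optionally, an update_mask limiting which fields are changed.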
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateMirroringEndpointGroupAssociationRequest() + + # Make the request + operation = client.update_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroupAssociation_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_sync.py new file mode 100644 index 000000000000..20e816596347 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMirroringEndpointGroupAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroupAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_mirroring_endpoint_group_association(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateMirroringEndpointGroupAssociationRequest() + + # Make the request + operation = client.update_mirroring_endpoint_group_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroupAssociation_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_async.py new file mode 100644 index 000000000000..9716c2fb9aaf --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMirroringEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateMirroringEndpointGroupRequest() + + # Make the request + operation = client.update_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_sync.py new file mode 100644 index 000000000000..38989c028da4 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMirroringEndpointGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_mirroring_endpoint_group(): + # Create a client + client = network_security_v1alpha1.MirroringClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateMirroringEndpointGroupRequest() + + # Make the request + operation = client.update_mirroring_endpoint_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authorization_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authorization_policy_async.py new file mode 100644 index 000000000000..bda290bdffb5 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authorization_policy_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAuthorizationPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthorizationPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
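+#   Enum fields such as action accept either the enum member or its string name
+#   (e.g. "ALLOW" or "DENY"), and the authorization_policy_id typically becomes
+#   the final segment of the created resource's name.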
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_authorization_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + authorization_policy = network_security_v1alpha1.AuthorizationPolicy() + authorization_policy.name = "name_value" + authorization_policy.action = "DENY" + + request = network_security_v1alpha1.CreateAuthorizationPolicyRequest( + parent="parent_value", + authorization_policy_id="authorization_policy_id_value", + authorization_policy=authorization_policy, + ) + + # Make the request + operation = client.create_authorization_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthorizationPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authorization_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authorization_policy_sync.py new file mode 100644 index 000000000000..81ba1c03bb99 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authorization_policy_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAuthorizationPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthorizationPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_authorization_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + authorization_policy = network_security_v1alpha1.AuthorizationPolicy() + authorization_policy.name = "name_value" + authorization_policy.action = "DENY" + + request = network_security_v1alpha1.CreateAuthorizationPolicyRequest( + parent="parent_value", + authorization_policy_id="authorization_policy_id_value", + authorization_policy=authorization_policy, + ) + + # Make the request + operation = client.create_authorization_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthorizationPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authz_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authz_policy_async.py new file mode 100644 index 000000000000..f06f27e28fca --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authz_policy_async.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAuthzPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthzPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
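+#   The target.resources values below are placeholders; in practice they usually
+#   reference the load-balancer resources (for example, forwarding rules) that
+#   the policy should attach to.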
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + authz_policy = network_security_v1alpha1.AuthzPolicy() + authz_policy.name = "name_value" + authz_policy.target.load_balancing_scheme = "INTERNAL_SELF_MANAGED" + authz_policy.target.resources = ["resources_value1", "resources_value2"] + authz_policy.action = "CUSTOM" + + request = network_security_v1alpha1.CreateAuthzPolicyRequest( + parent="parent_value", + authz_policy_id="authz_policy_id_value", + authz_policy=authz_policy, + ) + + # Make the request + operation = client.create_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthzPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authz_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authz_policy_sync.py new file mode 100644 index 000000000000..088c667b0fb2 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_authz_policy_sync.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAuthzPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthzPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + authz_policy = network_security_v1alpha1.AuthzPolicy() + authz_policy.name = "name_value" + authz_policy.target.load_balancing_scheme = "INTERNAL_SELF_MANAGED" + authz_policy.target.resources = ["resources_value1", "resources_value2"] + authz_policy.action = "CUSTOM" + + request = network_security_v1alpha1.CreateAuthzPolicyRequest( + parent="parent_value", + authz_policy_id="authz_policy_id_value", + authz_policy=authz_policy, + ) + + # Make the request + operation = client.create_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthzPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_async.py new file mode 100644 index 000000000000..1e55ed81ad04 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackendAuthenticationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateBackendAuthenticationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
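+#   Only the fields pre-populated by the generator are shown below; a working
+#   BackendAuthenticationConfig will usually also set service-specific fields
+#   (for example, trust or client-certificate settings) for your deployment.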
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + backend_authentication_config = ( + network_security_v1alpha1.BackendAuthenticationConfig() + ) + backend_authentication_config.name = "name_value" + + request = network_security_v1alpha1.CreateBackendAuthenticationConfigRequest( + parent="parent_value", + backend_authentication_config_id="backend_authentication_config_id_value", + backend_authentication_config=backend_authentication_config, + ) + + # Make the request + operation = client.create_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateBackendAuthenticationConfig_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_sync.py new file mode 100644 index 000000000000..1f8e24810e17 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackendAuthenticationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateBackendAuthenticationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + backend_authentication_config = ( + network_security_v1alpha1.BackendAuthenticationConfig() + ) + backend_authentication_config.name = "name_value" + + request = network_security_v1alpha1.CreateBackendAuthenticationConfigRequest( + parent="parent_value", + backend_authentication_config_id="backend_authentication_config_id_value", + backend_authentication_config=backend_authentication_config, + ) + + # Make the request + operation = client.create_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateBackendAuthenticationConfig_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_async.py new file mode 100644 index 000000000000..5b623a362041 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGatewaySecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + gateway_security_policy = network_security_v1alpha1.GatewaySecurityPolicy() + gateway_security_policy.name = "name_value" + + request = network_security_v1alpha1.CreateGatewaySecurityPolicyRequest( + parent="parent_value", + gateway_security_policy_id="gateway_security_policy_id_value", + gateway_security_policy=gateway_security_policy, + ) + + # Make the request + operation = client.create_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_async.py new file mode 100644 index 000000000000..5076795d261a --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_async.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGatewaySecurityPolicyRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicyRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
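+#   Values such as priority = 898 are arbitrary generated placeholders; in a real
+#   rule the priority orders evaluation and session_matcher is typically a CEL
+#   expression over the session attributes.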
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + gateway_security_policy_rule = network_security_v1alpha1.GatewaySecurityPolicyRule() + gateway_security_policy_rule.basic_profile = "DENY" + gateway_security_policy_rule.name = "name_value" + gateway_security_policy_rule.enabled = True + gateway_security_policy_rule.priority = 898 + gateway_security_policy_rule.session_matcher = "session_matcher_value" + + request = network_security_v1alpha1.CreateGatewaySecurityPolicyRuleRequest( + parent="parent_value", + gateway_security_policy_rule=gateway_security_policy_rule, + ) + + # Make the request + operation = client.create_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicyRule_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_sync.py new file mode 100644 index 000000000000..2675832e3c02 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_sync.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGatewaySecurityPolicyRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicyRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + gateway_security_policy_rule = network_security_v1alpha1.GatewaySecurityPolicyRule() + gateway_security_policy_rule.basic_profile = "DENY" + gateway_security_policy_rule.name = "name_value" + gateway_security_policy_rule.enabled = True + gateway_security_policy_rule.priority = 898 + gateway_security_policy_rule.session_matcher = "session_matcher_value" + + request = network_security_v1alpha1.CreateGatewaySecurityPolicyRuleRequest( + parent="parent_value", + gateway_security_policy_rule=gateway_security_policy_rule, + ) + + # Make the request + operation = client.create_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicyRule_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_sync.py new file mode 100644 index 000000000000..f85e0e0bc1a7 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGatewaySecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + gateway_security_policy = network_security_v1alpha1.GatewaySecurityPolicy() + gateway_security_policy.name = "name_value" + + request = network_security_v1alpha1.CreateGatewaySecurityPolicyRequest( + parent="parent_value", + gateway_security_policy_id="gateway_security_policy_id_value", + gateway_security_policy=gateway_security_policy, + ) + + # Make the request + operation = client.create_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_async.py new file mode 100644 index 000000000000..b40c487dda94 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateServerTlsPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateServerTlsPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + server_tls_policy = network_security_v1alpha1.ServerTlsPolicy() + server_tls_policy.name = "name_value" + + request = network_security_v1alpha1.CreateServerTlsPolicyRequest( + parent="parent_value", + server_tls_policy_id="server_tls_policy_id_value", + server_tls_policy=server_tls_policy, + ) + + # Make the request + operation = client.create_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateServerTlsPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_sync.py new file mode 100644 index 000000000000..9acc6f9c1474 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateServerTlsPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateServerTlsPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + server_tls_policy = network_security_v1alpha1.ServerTlsPolicy() + server_tls_policy.name = "name_value" + + request = network_security_v1alpha1.CreateServerTlsPolicyRequest( + parent="parent_value", + server_tls_policy_id="server_tls_policy_id_value", + server_tls_policy=server_tls_policy, + ) + + # Make the request + operation = client.create_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateServerTlsPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_async.py new file mode 100644 index 000000000000..71b19d479929 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTlsInspectionPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateTlsInspectionPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + tls_inspection_policy = network_security_v1alpha1.TlsInspectionPolicy() + tls_inspection_policy.name = "name_value" + tls_inspection_policy.ca_pool = "ca_pool_value" + + request = network_security_v1alpha1.CreateTlsInspectionPolicyRequest( + parent="parent_value", + tls_inspection_policy_id="tls_inspection_policy_id_value", + tls_inspection_policy=tls_inspection_policy, + ) + + # Make the request + operation = client.create_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateTlsInspectionPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_sync.py new file mode 100644 index 000000000000..8ae7cdfb1a71 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTlsInspectionPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateTlsInspectionPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + tls_inspection_policy = network_security_v1alpha1.TlsInspectionPolicy() + tls_inspection_policy.name = "name_value" + tls_inspection_policy.ca_pool = "ca_pool_value" + + request = network_security_v1alpha1.CreateTlsInspectionPolicyRequest( + parent="parent_value", + tls_inspection_policy_id="tls_inspection_policy_id_value", + tls_inspection_policy=tls_inspection_policy, + ) + + # Make the request + operation = client.create_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateTlsInspectionPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_url_list_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_url_list_async.py new file mode 100644 index 000000000000..d84c1fc03b82 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_url_list_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateUrlList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateUrlList_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + url_list = network_security_v1alpha1.UrlList() + url_list.name = "name_value" + url_list.values = ["values_value1", "values_value2"] + + request = network_security_v1alpha1.CreateUrlListRequest( + parent="parent_value", + url_list_id="url_list_id_value", + url_list=url_list, + ) + + # Make the request + operation = client.create_url_list(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateUrlList_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_url_list_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_url_list_sync.py new file mode 100644 index 000000000000..ab95094753bb --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_create_url_list_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateUrlList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_CreateUrlList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + url_list = network_security_v1alpha1.UrlList() + url_list.name = "name_value" + url_list.values = ["values_value1", "values_value2"] + + request = network_security_v1alpha1.CreateUrlListRequest( + parent="parent_value", + url_list_id="url_list_id_value", + url_list=url_list, + ) + + # Make the request + operation = client.create_url_list(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_CreateUrlList_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_async.py new file mode 100644 index 000000000000..ca50ecac76d7 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAuthorizationPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthorizationPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_authorization_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteAuthorizationPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_authorization_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthorizationPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_sync.py new file mode 100644 index 000000000000..56767c36fdf8 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAuthorizationPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthorizationPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_authorization_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteAuthorizationPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_authorization_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthorizationPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authz_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authz_policy_async.py new file mode 100644 index 000000000000..ccb005b9a17f --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authz_policy_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAuthzPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthzPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteAuthzPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthzPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authz_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authz_policy_sync.py new file mode 100644 index 000000000000..d86f51ba2b60 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_authz_policy_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAuthzPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthzPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteAuthzPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthzPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_async.py new file mode 100644 index 000000000000..3f797230ae48 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackendAuthenticationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteBackendAuthenticationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteBackendAuthenticationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteBackendAuthenticationConfig_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_sync.py new file mode 100644 index 000000000000..b975f6790fea --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackendAuthenticationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteBackendAuthenticationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteBackendAuthenticationConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteBackendAuthenticationConfig_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_async.py new file mode 100644 index 000000000000..b3c6f0e849f4 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGatewaySecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteGatewaySecurityPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_async.py new file mode 100644 index 000000000000..398d89ca3f7e --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGatewaySecurityPolicyRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicyRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicyRule_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_sync.py new file mode 100644 index 000000000000..20642d3dda20 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGatewaySecurityPolicyRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicyRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicyRule_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_sync.py new file mode 100644 index 000000000000..e7aa912a3ece --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGatewaySecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteGatewaySecurityPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_async.py new file mode 100644 index 000000000000..b4155dd19ce6 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteServerTlsPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteServerTlsPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteServerTlsPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteServerTlsPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_sync.py new file mode 100644 index 000000000000..a0db154953ac --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteServerTlsPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteServerTlsPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteServerTlsPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteServerTlsPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_async.py new file mode 100644 index 000000000000..e62399b5a571 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTlsInspectionPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteTlsInspectionPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteTlsInspectionPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteTlsInspectionPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_sync.py new file mode 100644 index 000000000000..a7e89963f69a --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTlsInspectionPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteTlsInspectionPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteTlsInspectionPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteTlsInspectionPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_url_list_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_url_list_async.py new file mode 100644 index 000000000000..e98a2d941e35 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_url_list_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteUrlList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteUrlList_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteUrlListRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_url_list(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteUrlList_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_url_list_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_url_list_sync.py new file mode 100644 index 000000000000..c19908a2ad1f --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_delete_url_list_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteUrlList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_DeleteUrlList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteUrlListRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_url_list(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_DeleteUrlList_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authorization_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authorization_policy_async.py new file mode 100644 index 000000000000..d51f3150f557 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authorization_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAuthorizationPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthorizationPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_authorization_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetAuthorizationPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_authorization_policy(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthorizationPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authorization_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authorization_policy_sync.py new file mode 100644 index 000000000000..6c1edb004bbe --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authorization_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAuthorizationPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthorizationPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_authorization_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetAuthorizationPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_authorization_policy(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthorizationPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authz_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authz_policy_async.py new file mode 100644 index 000000000000..f1bcf34b56ed --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authz_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAuthzPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthzPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetAuthzPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_authz_policy(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthzPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authz_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authz_policy_sync.py new file mode 100644 index 000000000000..8f2ef54bcaff --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_authz_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAuthzPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthzPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetAuthzPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_authz_policy(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthzPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_async.py new file mode 100644 index 000000000000..4f1fb72e9733 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackendAuthenticationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetBackendAuthenticationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetBackendAuthenticationConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backend_authentication_config(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetBackendAuthenticationConfig_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_sync.py new file mode 100644 index 000000000000..7a1b1c8c2df1 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackendAuthenticationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetBackendAuthenticationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetBackendAuthenticationConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_backend_authentication_config(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetBackendAuthenticationConfig_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_async.py new file mode 100644 index 000000000000..e3cb4904c995 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGatewaySecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetGatewaySecurityPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_gateway_security_policy(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_async.py new file mode 100644 index 000000000000..ed8f3889f851 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGatewaySecurityPolicyRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicyRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_gateway_security_policy_rule(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicyRule_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_sync.py new file mode 100644 index 000000000000..086b71c6d5cd --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGatewaySecurityPolicyRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicyRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + # Make the request + response = client.get_gateway_security_policy_rule(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicyRule_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_sync.py new file mode 100644 index 000000000000..81c6e124d368 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGatewaySecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetGatewaySecurityPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_gateway_security_policy(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_async.py new file mode 100644 index 000000000000..b77e802ad575 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetServerTlsPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetServerTlsPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetServerTlsPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_server_tls_policy(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetServerTlsPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_sync.py new file mode 100644 index 000000000000..37b512e992e9 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetServerTlsPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetServerTlsPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetServerTlsPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_server_tls_policy(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetServerTlsPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_async.py new file mode 100644 index 000000000000..23e15fe5b54a --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTlsInspectionPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetTlsInspectionPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetTlsInspectionPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tls_inspection_policy(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetTlsInspectionPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_sync.py new file mode 100644 index 000000000000..e6e4ea518328 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTlsInspectionPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetTlsInspectionPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetTlsInspectionPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_tls_inspection_policy(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetTlsInspectionPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_url_list_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_url_list_async.py new file mode 100644 index 000000000000..d8e6fd3449b5 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_url_list_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetUrlList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetUrlList_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetUrlListRequest( + name="name_value", + ) + + # Make the request + response = await client.get_url_list(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetUrlList_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_url_list_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_url_list_sync.py new file mode 100644 index 000000000000..a165959ef657 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_get_url_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetUrlList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_GetUrlList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetUrlListRequest( + name="name_value", + ) + + # Make the request + response = client.get_url_list(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_GetUrlList_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authorization_policies_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authorization_policies_async.py new file mode 100644 index 000000000000..f9867a84a944 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authorization_policies_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAuthorizationPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthorizationPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_authorization_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListAuthorizationPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_authorization_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthorizationPolicies_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authorization_policies_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authorization_policies_sync.py new file mode 100644 index 000000000000..c8d3643125f1 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authorization_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAuthorizationPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthorizationPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_authorization_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListAuthorizationPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_authorization_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthorizationPolicies_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authz_policies_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authz_policies_async.py new file mode 100644 index 000000000000..621e63074c81 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authz_policies_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAuthzPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthzPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_authz_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListAuthzPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_authz_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthzPolicies_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authz_policies_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authz_policies_sync.py new file mode 100644 index 000000000000..6ce563940327 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_authz_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAuthzPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthzPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_authz_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListAuthzPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_authz_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthzPolicies_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_async.py new file mode 100644 index 000000000000..b5bc35d8b25c --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackendAuthenticationConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListBackendAuthenticationConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_backend_authentication_configs(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListBackendAuthenticationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backend_authentication_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListBackendAuthenticationConfigs_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_sync.py new file mode 100644 index 000000000000..45b59e4152f1 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackendAuthenticationConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListBackendAuthenticationConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_backend_authentication_configs(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListBackendAuthenticationConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backend_authentication_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListBackendAuthenticationConfigs_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_async.py new file mode 100644 index 000000000000..4f70ef8985cb --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGatewaySecurityPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_gateway_security_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListGatewaySecurityPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gateway_security_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicies_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_sync.py new file mode 100644 index 000000000000..abad7fbdb806 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGatewaySecurityPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_gateway_security_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListGatewaySecurityPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gateway_security_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicies_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_async.py new file mode 100644 index 000000000000..b9c459a97e6b --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGatewaySecurityPolicyRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicyRules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_gateway_security_policy_rules(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListGatewaySecurityPolicyRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gateway_security_policy_rules(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicyRules_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_sync.py new file mode 100644 index 000000000000..b3c078096f2b --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGatewaySecurityPolicyRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicyRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_gateway_security_policy_rules(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListGatewaySecurityPolicyRulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gateway_security_policy_rules(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicyRules_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_async.py new file mode 100644 index 000000000000..ceacec9587dc --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServerTlsPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListServerTlsPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_server_tls_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListServerTlsPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_server_tls_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListServerTlsPolicies_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_sync.py new file mode 100644 index 000000000000..5775b1245c28 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServerTlsPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListServerTlsPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_server_tls_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListServerTlsPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_server_tls_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListServerTlsPolicies_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_async.py new file mode 100644 index 000000000000..09bedbe4a545 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTlsInspectionPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListTlsInspectionPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_tls_inspection_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListTlsInspectionPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tls_inspection_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListTlsInspectionPolicies_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_sync.py new file mode 100644 index 000000000000..e976cfa27ec4 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTlsInspectionPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListTlsInspectionPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_tls_inspection_policies(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListTlsInspectionPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tls_inspection_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListTlsInspectionPolicies_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_url_lists_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_url_lists_async.py new file mode 100644 index 000000000000..41b2076ff1ce --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_url_lists_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUrlLists +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListUrlLists_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_url_lists(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListUrlListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_url_lists(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListUrlLists_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_url_lists_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_url_lists_sync.py new file mode 100644 index 000000000000..ecae30770383 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_list_url_lists_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUrlLists +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_ListUrlLists_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_url_lists(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListUrlListsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_url_lists(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_ListUrlLists_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authorization_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authorization_policy_async.py new file mode 100644 index 000000000000..8036f6cb0b09 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authorization_policy_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAuthorizationPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthorizationPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_authorization_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + authorization_policy = network_security_v1alpha1.AuthorizationPolicy() + authorization_policy.name = "name_value" + authorization_policy.action = "DENY" + + request = network_security_v1alpha1.UpdateAuthorizationPolicyRequest( + authorization_policy=authorization_policy, + ) + + # Make the request + operation = client.update_authorization_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthorizationPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authorization_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authorization_policy_sync.py new file mode 100644 index 000000000000..1e9a0f3318f3 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authorization_policy_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAuthorizationPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthorizationPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_authorization_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + authorization_policy = network_security_v1alpha1.AuthorizationPolicy() + authorization_policy.name = "name_value" + authorization_policy.action = "DENY" + + request = network_security_v1alpha1.UpdateAuthorizationPolicyRequest( + authorization_policy=authorization_policy, + ) + + # Make the request + operation = client.update_authorization_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthorizationPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authz_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authz_policy_async.py new file mode 100644 index 000000000000..fb510ec66238 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authz_policy_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAuthzPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthzPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + authz_policy = network_security_v1alpha1.AuthzPolicy() + authz_policy.name = "name_value" + authz_policy.target.load_balancing_scheme = "INTERNAL_SELF_MANAGED" + authz_policy.target.resources = ["resources_value1", "resources_value2"] + authz_policy.action = "CUSTOM" + + request = network_security_v1alpha1.UpdateAuthzPolicyRequest( + authz_policy=authz_policy, + ) + + # Make the request + operation = client.update_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthzPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authz_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authz_policy_sync.py new file mode 100644 index 000000000000..ca18fbcc48ff --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_authz_policy_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAuthzPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthzPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_authz_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + authz_policy = network_security_v1alpha1.AuthzPolicy() + authz_policy.name = "name_value" + authz_policy.target.load_balancing_scheme = "INTERNAL_SELF_MANAGED" + authz_policy.target.resources = ["resources_value1", "resources_value2"] + authz_policy.action = "CUSTOM" + + request = network_security_v1alpha1.UpdateAuthzPolicyRequest( + authz_policy=authz_policy, + ) + + # Make the request + operation = client.update_authz_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthzPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_async.py new file mode 100644 index 000000000000..ef04a3533402 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackendAuthenticationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateBackendAuthenticationConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + backend_authentication_config = ( + network_security_v1alpha1.BackendAuthenticationConfig() + ) + backend_authentication_config.name = "name_value" + + request = network_security_v1alpha1.UpdateBackendAuthenticationConfigRequest( + backend_authentication_config=backend_authentication_config, + ) + + # Make the request + operation = client.update_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateBackendAuthenticationConfig_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_sync.py new file mode 100644 index 000000000000..180bceadeb8c --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackendAuthenticationConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateBackendAuthenticationConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_backend_authentication_config(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + backend_authentication_config = ( + network_security_v1alpha1.BackendAuthenticationConfig() + ) + backend_authentication_config.name = "name_value" + + request = network_security_v1alpha1.UpdateBackendAuthenticationConfigRequest( + backend_authentication_config=backend_authentication_config, + ) + + # Make the request + operation = client.update_backend_authentication_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateBackendAuthenticationConfig_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_async.py new file mode 100644 index 000000000000..b035e69dd36c --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGatewaySecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + gateway_security_policy = network_security_v1alpha1.GatewaySecurityPolicy() + gateway_security_policy.name = "name_value" + + request = network_security_v1alpha1.UpdateGatewaySecurityPolicyRequest( + gateway_security_policy=gateway_security_policy, + ) + + # Make the request + operation = client.update_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_async.py new file mode 100644 index 000000000000..97d7f66dc5cc --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGatewaySecurityPolicyRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicyRule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + gateway_security_policy_rule = network_security_v1alpha1.GatewaySecurityPolicyRule() + gateway_security_policy_rule.basic_profile = "DENY" + gateway_security_policy_rule.name = "name_value" + gateway_security_policy_rule.enabled = True + gateway_security_policy_rule.priority = 898 + gateway_security_policy_rule.session_matcher = "session_matcher_value" + + request = network_security_v1alpha1.UpdateGatewaySecurityPolicyRuleRequest( + gateway_security_policy_rule=gateway_security_policy_rule, + ) + + # Make the request + operation = client.update_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicyRule_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_sync.py new file mode 100644 index 000000000000..adcce87a6c35 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGatewaySecurityPolicyRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicyRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_gateway_security_policy_rule(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + gateway_security_policy_rule = network_security_v1alpha1.GatewaySecurityPolicyRule() + gateway_security_policy_rule.basic_profile = "DENY" + gateway_security_policy_rule.name = "name_value" + gateway_security_policy_rule.enabled = True + gateway_security_policy_rule.priority = 898 + gateway_security_policy_rule.session_matcher = "session_matcher_value" + + request = network_security_v1alpha1.UpdateGatewaySecurityPolicyRuleRequest( + gateway_security_policy_rule=gateway_security_policy_rule, + ) + + # Make the request + operation = client.update_gateway_security_policy_rule(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicyRule_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_sync.py new file mode 100644 index 000000000000..7a3823de8b68 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGatewaySecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_gateway_security_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + gateway_security_policy = network_security_v1alpha1.GatewaySecurityPolicy() + gateway_security_policy.name = "name_value" + + request = network_security_v1alpha1.UpdateGatewaySecurityPolicyRequest( + gateway_security_policy=gateway_security_policy, + ) + + # Make the request + operation = client.update_gateway_security_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_async.py new file mode 100644 index 000000000000..53290f116d00 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateServerTlsPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateServerTlsPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + server_tls_policy = network_security_v1alpha1.ServerTlsPolicy() + server_tls_policy.name = "name_value" + + request = network_security_v1alpha1.UpdateServerTlsPolicyRequest( + server_tls_policy=server_tls_policy, + ) + + # Make the request + operation = client.update_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateServerTlsPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_sync.py new file mode 100644 index 000000000000..6259056b13a3 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateServerTlsPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateServerTlsPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_server_tls_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + server_tls_policy = network_security_v1alpha1.ServerTlsPolicy() + server_tls_policy.name = "name_value" + + request = network_security_v1alpha1.UpdateServerTlsPolicyRequest( + server_tls_policy=server_tls_policy, + ) + + # Make the request + operation = client.update_server_tls_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateServerTlsPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_async.py new file mode 100644 index 000000000000..8af7298b357a --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTlsInspectionPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateTlsInspectionPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + tls_inspection_policy = network_security_v1alpha1.TlsInspectionPolicy() + tls_inspection_policy.name = "name_value" + tls_inspection_policy.ca_pool = "ca_pool_value" + + request = network_security_v1alpha1.UpdateTlsInspectionPolicyRequest( + tls_inspection_policy=tls_inspection_policy, + ) + + # Make the request + operation = client.update_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateTlsInspectionPolicy_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_sync.py new file mode 100644 index 000000000000..ffb2d29e7a49 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTlsInspectionPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateTlsInspectionPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_tls_inspection_policy(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + tls_inspection_policy = network_security_v1alpha1.TlsInspectionPolicy() + tls_inspection_policy.name = "name_value" + tls_inspection_policy.ca_pool = "ca_pool_value" + + request = network_security_v1alpha1.UpdateTlsInspectionPolicyRequest( + tls_inspection_policy=tls_inspection_policy, + ) + + # Make the request + operation = client.update_tls_inspection_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateTlsInspectionPolicy_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_url_list_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_url_list_async.py new file mode 100644 index 000000000000..d982508e813a --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_url_list_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateUrlList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateUrlList_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityAsyncClient() + + # Initialize request argument(s) + url_list = network_security_v1alpha1.UrlList() + url_list.name = "name_value" + url_list.values = ["values_value1", "values_value2"] + + request = network_security_v1alpha1.UpdateUrlListRequest( + url_list=url_list, + ) + + # Make the request + operation = client.update_url_list(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateUrlList_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_url_list_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_url_list_sync.py new file mode 100644 index 000000000000..8a4a3036160f --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_network_security_update_url_list_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateUrlList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_NetworkSecurity_UpdateUrlList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_url_list(): + # Create a client + client = network_security_v1alpha1.NetworkSecurityClient() + + # Initialize request argument(s) + url_list = network_security_v1alpha1.UrlList() + url_list.name = "name_value" + url_list.values = ["values_value1", "values_value2"] + + request = network_security_v1alpha1.UpdateUrlListRequest( + url_list=url_list, + ) + + # Make the request + operation = client.update_url_list(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_NetworkSecurity_UpdateUrlList_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_async.py new file mode 100644 index 000000000000..b5525ecef455 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSecurityProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_security_profile(): + # Create a client + client = ( + network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + ) + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSecurityProfileRequest( + parent="parent_value", + security_profile_id="security_profile_id_value", + ) + + # Make the request + operation = client.create_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfile_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_async.py new file mode 100644 index 000000000000..8688ff0da983 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSecurityProfileGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfileGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_security_profile_group(): + # Create a client + client = ( + network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + ) + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSecurityProfileGroupRequest( + parent="parent_value", + security_profile_group_id="security_profile_group_id_value", + ) + + # Make the request + operation = client.create_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfileGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_sync.py new file mode 100644 index 000000000000..160f50724b61 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSecurityProfileGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfileGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSecurityProfileGroupRequest( + parent="parent_value", + security_profile_group_id="security_profile_group_id_value", + ) + + # Make the request + operation = client.create_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfileGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_sync.py new file mode 100644 index 000000000000..a3bb7b7812f6 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSecurityProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSecurityProfileRequest( + parent="parent_value", + security_profile_id="security_profile_id_value", + ) + + # Make the request + operation = client.create_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfile_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_async.py new file mode 100644 index 000000000000..75033e9b2768 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSecurityProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_security_profile(): + # Create a client + client = ( + network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + ) + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSecurityProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfile_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_async.py new file mode 100644 index 000000000000..7d8c896e7c8b --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSecurityProfileGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfileGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_security_profile_group(): + # Create a client + client = ( + network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + ) + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSecurityProfileGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfileGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_sync.py new file mode 100644 index 000000000000..2241ede9b396 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSecurityProfileGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfileGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSecurityProfileGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfileGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_sync.py new file mode 100644 index 000000000000..dd86dd95bde3 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSecurityProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSecurityProfileRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfile_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_async.py new file mode 100644 index 000000000000..53edf06d2e70 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSecurityProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_security_profile(): + # Create a client + client = ( + network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + ) + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSecurityProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_security_profile(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfile_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_async.py new file mode 100644 index 000000000000..59f146bd0172 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSecurityProfileGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfileGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_security_profile_group(): + # Create a client + client = ( + network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + ) + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSecurityProfileGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_security_profile_group(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfileGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_sync.py new file mode 100644 index 000000000000..353e658e74b8 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSecurityProfileGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfileGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSecurityProfileGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_security_profile_group(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfileGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_sync.py new file mode 100644 index 000000000000..92085177f15e --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSecurityProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSecurityProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_security_profile(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfile_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_async.py new file mode 100644 index 000000000000..b6c615459241 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSecurityProfileGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfileGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_security_profile_groups(): + # Create a client + client = ( + network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + ) + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSecurityProfileGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_profile_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfileGroups_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_sync.py new file mode 100644 index 000000000000..7a6cf6d9be9b --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSecurityProfileGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfileGroups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_security_profile_groups(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSecurityProfileGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_profile_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfileGroups_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_async.py new file mode 100644 index 000000000000..5d8a31b33f9c --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSecurityProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_security_profiles(): + # Create a client + client = ( + network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + ) + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSecurityProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_profiles(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfiles_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_sync.py new file mode 100644 index 000000000000..81617925a5e7 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSecurityProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_security_profiles(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSecurityProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfiles_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_async.py new file mode 100644 index 000000000000..f3de4abed8ee --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSecurityProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_security_profile(): + # Create a client + client = ( + network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + ) + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateSecurityProfileRequest() + + # Make the request + operation = client.update_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfile_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_async.py new file mode 100644 index 000000000000..f6924e722b89 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSecurityProfileGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfileGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_security_profile_group(): + # Create a client + client = ( + network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient() + ) + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateSecurityProfileGroupRequest() + + # Make the request + operation = client.update_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfileGroup_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_sync.py new file mode 100644 index 000000000000..725d4a4dbcac --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSecurityProfileGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfileGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_security_profile_group(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateSecurityProfileGroupRequest() + + # Make the request + operation = client.update_security_profile_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfileGroup_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_sync.py new file mode 100644 index 000000000000..32ffcb5ce8f4 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSecurityProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_security_profile(): + # Create a client + client = network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.UpdateSecurityProfileRequest() + + # Make the request + operation = client.update_security_profile(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfile_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_async.py new file mode 100644 index 000000000000..04cf2e083ab4 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePartnerSSEGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_CreatePartnerSSEGateway_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + partner_sse_gateway = network_security_v1alpha1.PartnerSSEGateway() + partner_sse_gateway.sse_gateway_reference_id = "sse_gateway_reference_id_value" + + request = network_security_v1alpha1.CreatePartnerSSEGatewayRequest( + parent="parent_value", + partner_sse_gateway_id="partner_sse_gateway_id_value", + partner_sse_gateway=partner_sse_gateway, + ) + + # Make the request + operation = client.create_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_CreatePartnerSSEGateway_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_sync.py new file mode 100644 index 000000000000..6c1c33976657 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePartnerSSEGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_CreatePartnerSSEGateway_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + partner_sse_gateway = network_security_v1alpha1.PartnerSSEGateway() + partner_sse_gateway.sse_gateway_reference_id = "sse_gateway_reference_id_value" + + request = network_security_v1alpha1.CreatePartnerSSEGatewayRequest( + parent="parent_value", + partner_sse_gateway_id="partner_sse_gateway_id_value", + partner_sse_gateway=partner_sse_gateway, + ) + + # Make the request + operation = client.create_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_CreatePartnerSSEGateway_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_async.py new file mode 100644 index 000000000000..f470ec95f945 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePartnerSSEGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_DeletePartnerSSEGateway_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeletePartnerSSEGatewayRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_DeletePartnerSSEGateway_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_sync.py new file mode 100644 index 000000000000..1f96a0b450f6 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePartnerSSEGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_DeletePartnerSSEGateway_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeletePartnerSSEGatewayRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_DeletePartnerSSEGateway_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_async.py new file mode 100644 index 000000000000..0789025103ad --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPartnerSSEGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_GetPartnerSSEGateway_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetPartnerSSEGatewayRequest( + name="name_value", + ) + + # Make the request + response = await client.get_partner_sse_gateway(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_GetPartnerSSEGateway_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_sync.py new file mode 100644 index 000000000000..216acac0ec9d --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPartnerSSEGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_GetPartnerSSEGateway_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetPartnerSSEGatewayRequest( + name="name_value", + ) + + # Make the request + response = client.get_partner_sse_gateway(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_GetPartnerSSEGateway_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_async.py new file mode 100644 index 000000000000..730230b0aa2c --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSSEGatewayReference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_GetSSEGatewayReference_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_sse_gateway_reference(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSSEGatewayReferenceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_sse_gateway_reference(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_GetSSEGatewayReference_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_sync.py new file mode 100644 index 000000000000..e56078cfa087 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSSEGatewayReference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_GetSSEGatewayReference_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_sse_gateway_reference(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSSEGatewayReferenceRequest( + name="name_value", + ) + + # Make the request + response = client.get_sse_gateway_reference(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_GetSSEGatewayReference_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_async.py new file mode 100644 index 000000000000..643f6d084c7e --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPartnerSSEGateways +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_ListPartnerSSEGateways_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_partner_sse_gateways(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListPartnerSSEGatewaysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partner_sse_gateways(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_ListPartnerSSEGateways_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_sync.py new file mode 100644 index 000000000000..4501bf2c93bf --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPartnerSSEGateways +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_ListPartnerSSEGateways_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_partner_sse_gateways(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListPartnerSSEGatewaysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partner_sse_gateways(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_ListPartnerSSEGateways_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_async.py new file mode 100644 index 000000000000..1eeb3de7be29 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSSEGatewayReferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_ListSSEGatewayReferences_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_sse_gateway_references(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSSEGatewayReferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sse_gateway_references(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_ListSSEGatewayReferences_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_sync.py new file mode 100644 index 000000000000..972fd4b9270d --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSSEGatewayReferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_ListSSEGatewayReferences_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_sse_gateway_references(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSSEGatewayReferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sse_gateway_references(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_ListSSEGatewayReferences_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_async.py new file mode 100644 index 000000000000..24bc51313c40 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePartnerSSEGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_UpdatePartnerSSEGateway_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_update_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceAsyncClient() + + # Initialize request argument(s) + partner_sse_gateway = network_security_v1alpha1.PartnerSSEGateway() + partner_sse_gateway.sse_gateway_reference_id = "sse_gateway_reference_id_value" + + request = network_security_v1alpha1.UpdatePartnerSSEGatewayRequest( + partner_sse_gateway=partner_sse_gateway, + ) + + # Make the request + operation = client.update_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_UpdatePartnerSSEGateway_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_sync.py new file mode 100644 index 000000000000..a7b2071575dc --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePartnerSSEGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSEGatewayService_UpdatePartnerSSEGateway_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_update_partner_sse_gateway(): + # Create a client + client = network_security_v1alpha1.SSEGatewayServiceClient() + + # Initialize request argument(s) + partner_sse_gateway = network_security_v1alpha1.PartnerSSEGateway() + partner_sse_gateway.sse_gateway_reference_id = "sse_gateway_reference_id_value" + + request = network_security_v1alpha1.UpdatePartnerSSEGatewayRequest( + partner_sse_gateway=partner_sse_gateway, + ) + + # Make the request + operation = client.update_partner_sse_gateway(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSEGatewayService_UpdatePartnerSSEGateway_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_async.py new file mode 100644 index 000000000000..1e1a88da7744 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePartnerSSERealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_CreatePartnerSSERealm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + partner_sse_realm = network_security_v1alpha1.PartnerSSERealm() + partner_sse_realm.pairing_key = "pairing_key_value" + + request = network_security_v1alpha1.CreatePartnerSSERealmRequest( + parent="parent_value", + partner_sse_realm_id="partner_sse_realm_id_value", + partner_sse_realm=partner_sse_realm, + ) + + # Make the request + operation = client.create_partner_sse_realm(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_CreatePartnerSSERealm_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_sync.py new file mode 100644 index 000000000000..4c88a47d8d22 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePartnerSSERealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_CreatePartnerSSERealm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + partner_sse_realm = network_security_v1alpha1.PartnerSSERealm() + partner_sse_realm.pairing_key = "pairing_key_value" + + request = network_security_v1alpha1.CreatePartnerSSERealmRequest( + parent="parent_value", + partner_sse_realm_id="partner_sse_realm_id_value", + partner_sse_realm=partner_sse_realm, + ) + + # Make the request + operation = client.create_partner_sse_realm(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_CreatePartnerSSERealm_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_async.py new file mode 100644 index 000000000000..7d9eabd3115e --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSACAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_CreateSACAttachment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + sac_attachment = network_security_v1alpha1.SACAttachment() + sac_attachment.sac_realm = "sac_realm_value" + sac_attachment.ncc_gateway = "ncc_gateway_value" + + request = network_security_v1alpha1.CreateSACAttachmentRequest( + parent="parent_value", + sac_attachment_id="sac_attachment_id_value", + sac_attachment=sac_attachment, + ) + + # Make the request + operation = client.create_sac_attachment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_CreateSACAttachment_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_sync.py new file mode 100644 index 000000000000..e187d8d4f69d --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSACAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_CreateSACAttachment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + sac_attachment = network_security_v1alpha1.SACAttachment() + sac_attachment.sac_realm = "sac_realm_value" + sac_attachment.ncc_gateway = "ncc_gateway_value" + + request = network_security_v1alpha1.CreateSACAttachmentRequest( + parent="parent_value", + sac_attachment_id="sac_attachment_id_value", + sac_attachment=sac_attachment, + ) + + # Make the request + operation = client.create_sac_attachment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_CreateSACAttachment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_async.py new file mode 100644 index 000000000000..7775952c5390 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSACRealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_CreateSACRealm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_create_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSACRealmRequest( + parent="parent_value", + sac_realm_id="sac_realm_id_value", + ) + + # Make the request + operation = client.create_sac_realm(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_CreateSACRealm_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_sync.py new file mode 100644 index 000000000000..fd325ed9c34d --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSACRealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_CreateSACRealm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_create_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.CreateSACRealmRequest( + parent="parent_value", + sac_realm_id="sac_realm_id_value", + ) + + # Make the request + operation = client.create_sac_realm(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_CreateSACRealm_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_async.py new file mode 100644 index 000000000000..f4df55c307a5 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePartnerSSERealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_DeletePartnerSSERealm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeletePartnerSSERealmRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_partner_sse_realm(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_DeletePartnerSSERealm_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_sync.py new file mode 100644 index 000000000000..98b5a255b8cf --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePartnerSSERealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_DeletePartnerSSERealm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeletePartnerSSERealmRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_partner_sse_realm(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_DeletePartnerSSERealm_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_async.py new file mode 100644 index 000000000000..2381d52d42da --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSACAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_DeleteSACAttachment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSACAttachmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_sac_attachment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_DeleteSACAttachment_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_sync.py new file mode 100644 index 000000000000..67faebd22151 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSACAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_DeleteSACAttachment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSACAttachmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_sac_attachment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_DeleteSACAttachment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_async.py new file mode 100644 index 000000000000..a8b901ce2183 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSACRealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_DeleteSACRealm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_delete_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSACRealmRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_sac_realm(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_DeleteSACRealm_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_sync.py new file mode 100644 index 000000000000..20ba44360625 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSACRealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_DeleteSACRealm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_delete_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.DeleteSACRealmRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_sac_realm(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_DeleteSACRealm_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_async.py new file mode 100644 index 000000000000..0f4cfc33d2cb --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPartnerSSERealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_GetPartnerSSERealm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetPartnerSSERealmRequest( + name="name_value", + ) + + # Make the request + response = await client.get_partner_sse_realm(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_GetPartnerSSERealm_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_sync.py new file mode 100644 index 000000000000..f1dc04e67210 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPartnerSSERealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_GetPartnerSSERealm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_partner_sse_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetPartnerSSERealmRequest( + name="name_value", + ) + + # Make the request + response = client.get_partner_sse_realm(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_GetPartnerSSERealm_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_async.py new file mode 100644 index 000000000000..5f8da52a1a4f --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSACAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_GetSACAttachment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSACAttachmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_sac_attachment(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_GetSACAttachment_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_sync.py new file mode 100644 index 000000000000..46aa3aabce6a --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSACAttachment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_GetSACAttachment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_sac_attachment(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSACAttachmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_sac_attachment(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_GetSACAttachment_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_async.py new file mode 100644 index 000000000000..dfc027e549e5 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSACRealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_GetSACRealm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_get_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSACRealmRequest( + name="name_value", + ) + + # Make the request + response = await client.get_sac_realm(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_GetSACRealm_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_sync.py new file mode 100644 index 000000000000..7155c904a062 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSACRealm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_GetSACRealm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_get_sac_realm(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.GetSACRealmRequest( + name="name_value", + ) + + # Make the request + response = client.get_sac_realm(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_GetSACRealm_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_async.py new file mode 100644 index 000000000000..5ea7fe7df173 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPartnerSSERealms +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_ListPartnerSSERealms_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_partner_sse_realms(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListPartnerSSERealmsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partner_sse_realms(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_ListPartnerSSERealms_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_sync.py new file mode 100644 index 000000000000..993b85ea8fc9 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPartnerSSERealms +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_ListPartnerSSERealms_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_partner_sse_realms(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListPartnerSSERealmsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_partner_sse_realms(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_ListPartnerSSERealms_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_async.py new file mode 100644 index 000000000000..a383b50168a0 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSACAttachments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_ListSACAttachments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_sac_attachments(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSACAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sac_attachments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_ListSACAttachments_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_sync.py new file mode 100644 index 000000000000..af4572208a39 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSACAttachments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_ListSACAttachments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_sac_attachments(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSACAttachmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sac_attachments(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_ListSACAttachments_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_async.py new file mode 100644 index 000000000000..c88b8883db17 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSACRealms +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_ListSACRealms_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +async def sample_list_sac_realms(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSACRealmsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sac_realms(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_ListSACRealms_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_sync.py new file mode 100644 index 000000000000..9a90bb198985 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSACRealms +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1alpha1_generated_SSERealmService_ListSACRealms_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1alpha1 + + +def sample_list_sac_realms(): + # Create a client + client = network_security_v1alpha1.SSERealmServiceClient() + + # Initialize request argument(s) + request = network_security_v1alpha1.ListSACRealmsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sac_realms(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1alpha1_generated_SSERealmService_ListSACRealms_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1alpha1.json b/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1alpha1.json index e60a51171d71..b39dfc9b19b2 100644 --- a/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1alpha1.json +++ b/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1alpha1.json @@ -16,33 +16,33 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", - "shortName": "NetworkSecurityAsyncClient" + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient", + "shortName": "DnsThreatDetectorServiceAsyncClient" }, - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.create_client_tls_policy", + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient.create_dns_threat_detector", "method": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateClientTlsPolicy", + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService.CreateDnsThreatDetector", "service": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "shortName": "NetworkSecurity" + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" }, - "shortName": "CreateClientTlsPolicy" + "shortName": "CreateDnsThreatDetector" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_security_v1alpha1.types.CreateClientTlsPolicyRequest" + "type": "google.cloud.network_security_v1alpha1.types.CreateDnsThreatDetectorRequest" }, { "name": "parent", "type": "str" }, { - "name": "client_tls_policy", - "type": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy" + "name": "dns_threat_detector", + "type": "google.cloud.network_security_v1alpha1.types.DnsThreatDetector" }, { - "name": "client_tls_policy_id", + "name": "dns_threat_detector_id", "type": "str" }, { @@ -58,22 +58,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_client_tls_policy" + "resultType": "google.cloud.network_security_v1alpha1.types.DnsThreatDetector", + "shortName": "create_dns_threat_detector" }, - "description": "Sample for CreateClientTlsPolicy", - "file": "networksecurity_v1alpha1_generated_network_security_create_client_tls_policy_async.py", + "description": "Sample for CreateDnsThreatDetector", + "file": 
"networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateClientTlsPolicy_async", + "regionTag": "networksecurity_v1alpha1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_async", "segments": [ { - "end": 60, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 55, "start": 27, "type": "SHORT" }, @@ -83,54 +83,54 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "networksecurity_v1alpha1_generated_network_security_create_client_tls_policy_async.py" + "title": "networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", - "shortName": "NetworkSecurityClient" + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceClient", + "shortName": "DnsThreatDetectorServiceClient" }, - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.create_client_tls_policy", + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceClient.create_dns_threat_detector", "method": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateClientTlsPolicy", + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService.CreateDnsThreatDetector", "service": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "shortName": "NetworkSecurity" + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" }, - "shortName": "CreateClientTlsPolicy" + "shortName": "CreateDnsThreatDetector" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_security_v1alpha1.types.CreateClientTlsPolicyRequest" + "type": "google.cloud.network_security_v1alpha1.types.CreateDnsThreatDetectorRequest" }, { "name": "parent", "type": "str" }, { - "name": "client_tls_policy", - "type": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy" + "name": "dns_threat_detector", + "type": "google.cloud.network_security_v1alpha1.types.DnsThreatDetector" }, { - "name": "client_tls_policy_id", + "name": "dns_threat_detector_id", "type": "str" }, { @@ -146,22 +146,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_client_tls_policy" + "resultType": "google.cloud.network_security_v1alpha1.types.DnsThreatDetector", + "shortName": "create_dns_threat_detector" }, - "description": "Sample for CreateClientTlsPolicy", - "file": "networksecurity_v1alpha1_generated_network_security_create_client_tls_policy_sync.py", + "description": "Sample for CreateDnsThreatDetector", + "file": "networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateClientTlsPolicy_sync", + "regionTag": "networksecurity_v1alpha1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_sync", "segments": [ { - "end": 60, + "end": 55, 
"start": 27, "type": "FULL" }, { - "end": 60, + "end": 55, "start": 27, "type": "SHORT" }, @@ -171,44 +171,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "networksecurity_v1alpha1_generated_network_security_create_client_tls_policy_sync.py" + "title": "networksecurity_v1alpha1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", - "shortName": "NetworkSecurityAsyncClient" + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient", + "shortName": "DnsThreatDetectorServiceAsyncClient" }, - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.delete_client_tls_policy", + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient.delete_dns_threat_detector", "method": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteClientTlsPolicy", + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService.DeleteDnsThreatDetector", "service": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "shortName": "NetworkSecurity" + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" }, - "shortName": "DeleteClientTlsPolicy" + "shortName": "DeleteDnsThreatDetector" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_security_v1alpha1.types.DeleteClientTlsPolicyRequest" + "type": "google.cloud.network_security_v1alpha1.types.DeleteDnsThreatDetectorRequest" }, { "name": "name", @@ -227,22 +227,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_client_tls_policy" + "shortName": "delete_dns_threat_detector" }, - "description": "Sample for DeleteClientTlsPolicy", - "file": "networksecurity_v1alpha1_generated_network_security_delete_client_tls_policy_async.py", + "description": "Sample for DeleteDnsThreatDetector", + "file": "networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteClientTlsPolicy_async", + "regionTag": "networksecurity_v1alpha1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_async", "segments": [ { - "end": 55, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 49, "start": 27, "type": "SHORT" }, @@ -257,38 +256,36 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "networksecurity_v1alpha1_generated_network_security_delete_client_tls_policy_async.py" + "title": "networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", - "shortName": "NetworkSecurityClient" + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceClient", + 
"shortName": "DnsThreatDetectorServiceClient" }, - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.delete_client_tls_policy", + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceClient.delete_dns_threat_detector", "method": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteClientTlsPolicy", + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService.DeleteDnsThreatDetector", "service": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "shortName": "NetworkSecurity" + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" }, - "shortName": "DeleteClientTlsPolicy" + "shortName": "DeleteDnsThreatDetector" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_security_v1alpha1.types.DeleteClientTlsPolicyRequest" + "type": "google.cloud.network_security_v1alpha1.types.DeleteDnsThreatDetectorRequest" }, { "name": "name", @@ -307,22 +304,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_client_tls_policy" + "shortName": "delete_dns_threat_detector" }, - "description": "Sample for DeleteClientTlsPolicy", - "file": "networksecurity_v1alpha1_generated_network_security_delete_client_tls_policy_sync.py", + "description": "Sample for DeleteDnsThreatDetector", + "file": "networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteClientTlsPolicy_sync", + "regionTag": "networksecurity_v1alpha1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_sync", "segments": [ { - "end": 55, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 49, "start": 27, "type": "SHORT" }, @@ -337,39 +333,37 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "networksecurity_v1alpha1_generated_network_security_delete_client_tls_policy_sync.py" + "title": "networksecurity_v1alpha1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", - "shortName": "NetworkSecurityAsyncClient" + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient", + "shortName": "DnsThreatDetectorServiceAsyncClient" }, - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.get_client_tls_policy", + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient.get_dns_threat_detector", "method": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetClientTlsPolicy", + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService.GetDnsThreatDetector", "service": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "shortName": "NetworkSecurity" + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" }, - "shortName": "GetClientTlsPolicy" + "shortName": "GetDnsThreatDetector" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.network_security_v1alpha1.types.GetClientTlsPolicyRequest" + "type": "google.cloud.network_security_v1alpha1.types.GetDnsThreatDetectorRequest" }, { "name": "name", @@ -388,14 +382,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy", - "shortName": "get_client_tls_policy" + "resultType": "google.cloud.network_security_v1alpha1.types.DnsThreatDetector", + "shortName": "get_dns_threat_detector" }, - "description": "Sample for GetClientTlsPolicy", - "file": "networksecurity_v1alpha1_generated_network_security_get_client_tls_policy_async.py", + "description": "Sample for GetDnsThreatDetector", + "file": "networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetClientTlsPolicy_async", + "regionTag": "networksecurity_v1alpha1_generated_DnsThreatDetectorService_GetDnsThreatDetector_async", "segments": [ { "end": 51, @@ -428,28 +422,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networksecurity_v1alpha1_generated_network_security_get_client_tls_policy_async.py" + "title": "networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", - "shortName": "NetworkSecurityClient" + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceClient", + "shortName": "DnsThreatDetectorServiceClient" }, - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.get_client_tls_policy", + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceClient.get_dns_threat_detector", "method": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetClientTlsPolicy", + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService.GetDnsThreatDetector", "service": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "shortName": "NetworkSecurity" + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" }, - "shortName": "GetClientTlsPolicy" + "shortName": "GetDnsThreatDetector" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_security_v1alpha1.types.GetClientTlsPolicyRequest" + "type": "google.cloud.network_security_v1alpha1.types.GetDnsThreatDetectorRequest" }, { "name": "name", @@ -468,14 +462,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy", - "shortName": "get_client_tls_policy" + "resultType": "google.cloud.network_security_v1alpha1.types.DnsThreatDetector", + "shortName": "get_dns_threat_detector" }, - "description": "Sample for GetClientTlsPolicy", - "file": "networksecurity_v1alpha1_generated_network_security_get_client_tls_policy_sync.py", + "description": "Sample for GetDnsThreatDetector", + "file": "networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetClientTlsPolicy_sync", + "regionTag": "networksecurity_v1alpha1_generated_DnsThreatDetectorService_GetDnsThreatDetector_sync", "segments": [ { "end": 51, @@ -508,29 +502,29 @@ "type": 
"RESPONSE_HANDLING" } ], - "title": "networksecurity_v1alpha1_generated_network_security_get_client_tls_policy_sync.py" + "title": "networksecurity_v1alpha1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", - "shortName": "NetworkSecurityAsyncClient" + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient", + "shortName": "DnsThreatDetectorServiceAsyncClient" }, - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.list_client_tls_policies", + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient.list_dns_threat_detectors", "method": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListClientTlsPolicies", + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService.ListDnsThreatDetectors", "service": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "shortName": "NetworkSecurity" + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" }, - "shortName": "ListClientTlsPolicies" + "shortName": "ListDnsThreatDetectors" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest" + "type": "google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsRequest" }, { "name": "parent", @@ -549,14 +543,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListClientTlsPoliciesAsyncPager", - "shortName": "list_client_tls_policies" + "resultType": "google.cloud.network_security_v1alpha1.services.dns_threat_detector_service.pagers.ListDnsThreatDetectorsAsyncPager", + "shortName": "list_dns_threat_detectors" }, - "description": "Sample for ListClientTlsPolicies", - "file": "networksecurity_v1alpha1_generated_network_security_list_client_tls_policies_async.py", + "description": "Sample for ListDnsThreatDetectors", + "file": "networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListClientTlsPolicies_async", + "regionTag": "networksecurity_v1alpha1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_async", "segments": [ { "end": 52, @@ -589,28 +583,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networksecurity_v1alpha1_generated_network_security_list_client_tls_policies_async.py" + "title": "networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", - "shortName": "NetworkSecurityClient" + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceClient", + "shortName": "DnsThreatDetectorServiceClient" }, - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.list_client_tls_policies", + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceClient.list_dns_threat_detectors", "method": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListClientTlsPolicies", + "fullName": 
"google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService.ListDnsThreatDetectors", "service": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "shortName": "NetworkSecurity" + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" }, - "shortName": "ListClientTlsPolicies" + "shortName": "ListDnsThreatDetectors" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest" + "type": "google.cloud.network_security_v1alpha1.types.ListDnsThreatDetectorsRequest" }, { "name": "parent", @@ -629,14 +623,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListClientTlsPoliciesPager", - "shortName": "list_client_tls_policies" + "resultType": "google.cloud.network_security_v1alpha1.services.dns_threat_detector_service.pagers.ListDnsThreatDetectorsPager", + "shortName": "list_dns_threat_detectors" }, - "description": "Sample for ListClientTlsPolicies", - "file": "networksecurity_v1alpha1_generated_network_security_list_client_tls_policies_sync.py", + "description": "Sample for ListDnsThreatDetectors", + "file": "networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListClientTlsPolicies_sync", + "regionTag": "networksecurity_v1alpha1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_sync", "segments": [ { "end": 52, @@ -669,33 +663,33 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networksecurity_v1alpha1_generated_network_security_list_client_tls_policies_sync.py" + "title": "networksecurity_v1alpha1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", - "shortName": "NetworkSecurityAsyncClient" + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient", + "shortName": "DnsThreatDetectorServiceAsyncClient" }, - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.update_client_tls_policy", + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceAsyncClient.update_dns_threat_detector", "method": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateClientTlsPolicy", + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService.UpdateDnsThreatDetector", "service": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "shortName": "NetworkSecurity" + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" }, - "shortName": "UpdateClientTlsPolicy" + "shortName": "UpdateDnsThreatDetector" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_security_v1alpha1.types.UpdateClientTlsPolicyRequest" + "type": "google.cloud.network_security_v1alpha1.types.UpdateDnsThreatDetectorRequest" }, { - "name": "client_tls_policy", - "type": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy" + "name": "dns_threat_detector", + "type": "google.cloud.network_security_v1alpha1.types.DnsThreatDetector" }, { "name": "update_mask", @@ -714,22 +708,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], 
- "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_client_tls_policy" + "resultType": "google.cloud.network_security_v1alpha1.types.DnsThreatDetector", + "shortName": "update_dns_threat_detector" }, - "description": "Sample for UpdateClientTlsPolicy", - "file": "networksecurity_v1alpha1_generated_network_security_update_client_tls_policy_async.py", + "description": "Sample for UpdateDnsThreatDetector", + "file": "networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateClientTlsPolicy_async", + "regionTag": "networksecurity_v1alpha1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_async", "segments": [ { - "end": 58, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 54, "start": 27, "type": "SHORT" }, @@ -744,42 +738,42 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 55, + "end": 51, "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "networksecurity_v1alpha1_generated_network_security_update_client_tls_policy_async.py" + "title": "networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", - "shortName": "NetworkSecurityClient" + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceClient", + "shortName": "DnsThreatDetectorServiceClient" }, - "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.update_client_tls_policy", + "fullName": "google.cloud.network_security_v1alpha1.DnsThreatDetectorServiceClient.update_dns_threat_detector", "method": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateClientTlsPolicy", + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService.UpdateDnsThreatDetector", "service": { - "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", - "shortName": "NetworkSecurity" + "fullName": "google.cloud.networksecurity.v1alpha1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" }, - "shortName": "UpdateClientTlsPolicy" + "shortName": "UpdateDnsThreatDetector" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_security_v1alpha1.types.UpdateClientTlsPolicyRequest" + "type": "google.cloud.network_security_v1alpha1.types.UpdateDnsThreatDetectorRequest" }, { - "name": "client_tls_policy", - "type": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy" + "name": "dns_threat_detector", + "type": "google.cloud.network_security_v1alpha1.types.DnsThreatDetector" }, { "name": "update_mask", @@ -798,22 +792,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_client_tls_policy" + "resultType": "google.cloud.network_security_v1alpha1.types.DnsThreatDetector", + "shortName": "update_dns_threat_detector" }, - "description": "Sample for UpdateClientTlsPolicy", - "file": "networksecurity_v1alpha1_generated_network_security_update_client_tls_policy_sync.py", + "description": "Sample for UpdateDnsThreatDetector", + "file": "networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateClientTlsPolicy_sync", + "regionTag": "networksecurity_v1alpha1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_sync", "segments": [ { - "end": 58, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 54, "start": 27, "type": "SHORT" }, @@ -828,17 +822,20557 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 55, + "end": 51, "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "networksecurity_v1alpha1_generated_network_security_update_client_tls_policy_sync.py" + "title": "networksecurity_v1alpha1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient", + "shortName": "FirewallActivationAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient.create_firewall_endpoint_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.CreateFirewallEndpointAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "CreateFirewallEndpointAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateFirewallEndpointAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "firewall_endpoint_association", + "type": "google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation" + }, + { + "name": "firewall_endpoint_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_firewall_endpoint_association" + }, + "description": "Sample for CreateFirewallEndpointAssociation", + "file": "networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpointAssociation_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient", + "shortName": "FirewallActivationClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient.create_firewall_endpoint_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.CreateFirewallEndpointAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + 
"shortName": "FirewallActivation" + }, + "shortName": "CreateFirewallEndpointAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateFirewallEndpointAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "firewall_endpoint_association", + "type": "google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation" + }, + { + "name": "firewall_endpoint_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_firewall_endpoint_association" + }, + "description": "Sample for CreateFirewallEndpointAssociation", + "file": "networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpointAssociation_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient", + "shortName": "FirewallActivationAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient.create_firewall_endpoint", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.CreateFirewallEndpoint", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "CreateFirewallEndpoint" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateFirewallEndpointRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "firewall_endpoint", + "type": "google.cloud.network_security_v1alpha1.types.FirewallEndpoint" + }, + { + "name": "firewall_endpoint_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_firewall_endpoint" + }, + "description": "Sample for CreateFirewallEndpoint", + "file": "networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpoint_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, 
+ "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient", + "shortName": "FirewallActivationClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient.create_firewall_endpoint", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.CreateFirewallEndpoint", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "CreateFirewallEndpoint" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateFirewallEndpointRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "firewall_endpoint", + "type": "google.cloud.network_security_v1alpha1.types.FirewallEndpoint" + }, + { + "name": "firewall_endpoint_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_firewall_endpoint" + }, + "description": "Sample for CreateFirewallEndpoint", + "file": "networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_CreateFirewallEndpoint_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_create_firewall_endpoint_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient", + "shortName": "FirewallActivationAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient.delete_firewall_endpoint_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.DeleteFirewallEndpointAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "DeleteFirewallEndpointAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteFirewallEndpointAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_firewall_endpoint_association" + }, + "description": "Sample for DeleteFirewallEndpointAssociation", + "file": 
"networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpointAssociation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient", + "shortName": "FirewallActivationClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient.delete_firewall_endpoint_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.DeleteFirewallEndpointAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "DeleteFirewallEndpointAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteFirewallEndpointAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_firewall_endpoint_association" + }, + "description": "Sample for DeleteFirewallEndpointAssociation", + "file": "networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpointAssociation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient", + "shortName": "FirewallActivationAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient.delete_firewall_endpoint", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.DeleteFirewallEndpoint", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "DeleteFirewallEndpoint" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteFirewallEndpointRequest" + }, + { + "name": "name", + 
"type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_firewall_endpoint" + }, + "description": "Sample for DeleteFirewallEndpoint", + "file": "networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpoint_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient", + "shortName": "FirewallActivationClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient.delete_firewall_endpoint", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.DeleteFirewallEndpoint", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "DeleteFirewallEndpoint" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteFirewallEndpointRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_firewall_endpoint" + }, + "description": "Sample for DeleteFirewallEndpoint", + "file": "networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_DeleteFirewallEndpoint_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_delete_firewall_endpoint_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient", + "shortName": "FirewallActivationAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient.get_firewall_endpoint_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.GetFirewallEndpointAssociation", + "service": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "GetFirewallEndpointAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetFirewallEndpointAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation", + "shortName": "get_firewall_endpoint_association" + }, + "description": "Sample for GetFirewallEndpointAssociation", + "file": "networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpointAssociation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient", + "shortName": "FirewallActivationClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient.get_firewall_endpoint_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.GetFirewallEndpointAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "GetFirewallEndpointAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetFirewallEndpointAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation", + "shortName": "get_firewall_endpoint_association" + }, + "description": "Sample for GetFirewallEndpointAssociation", + "file": "networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpointAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_association_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient", + "shortName": "FirewallActivationAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient.get_firewall_endpoint", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.GetFirewallEndpoint", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "GetFirewallEndpoint" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetFirewallEndpointRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.FirewallEndpoint", + "shortName": "get_firewall_endpoint" + }, + "description": "Sample for GetFirewallEndpoint", + "file": "networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpoint_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient", + "shortName": "FirewallActivationClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient.get_firewall_endpoint", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.GetFirewallEndpoint", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "GetFirewallEndpoint" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetFirewallEndpointRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.FirewallEndpoint", + "shortName": "get_firewall_endpoint" + }, + "description": "Sample for GetFirewallEndpoint", + "file": "networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_GetFirewallEndpoint_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + 
{ + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_get_firewall_endpoint_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient", + "shortName": "FirewallActivationAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient.list_firewall_endpoint_associations", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.ListFirewallEndpointAssociations", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "ListFirewallEndpointAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.firewall_activation.pagers.ListFirewallEndpointAssociationsAsyncPager", + "shortName": "list_firewall_endpoint_associations" + }, + "description": "Sample for ListFirewallEndpointAssociations", + "file": "networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpointAssociations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient", + "shortName": "FirewallActivationClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient.list_firewall_endpoint_associations", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.ListFirewallEndpointAssociations", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "ListFirewallEndpointAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListFirewallEndpointAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.firewall_activation.pagers.ListFirewallEndpointAssociationsPager", + "shortName": "list_firewall_endpoint_associations" + }, + "description": "Sample 
for ListFirewallEndpointAssociations", + "file": "networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpointAssociations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoint_associations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient", + "shortName": "FirewallActivationAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient.list_firewall_endpoints", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.ListFirewallEndpoints", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "ListFirewallEndpoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.firewall_activation.pagers.ListFirewallEndpointsAsyncPager", + "shortName": "list_firewall_endpoints" + }, + "description": "Sample for ListFirewallEndpoints", + "file": "networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpoints_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient", + "shortName": "FirewallActivationClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient.list_firewall_endpoints", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.ListFirewallEndpoints", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "ListFirewallEndpoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListFirewallEndpointsRequest" + }, + { + "name": "parent", 
+ "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.firewall_activation.pagers.ListFirewallEndpointsPager", + "shortName": "list_firewall_endpoints" + }, + "description": "Sample for ListFirewallEndpoints", + "file": "networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_ListFirewallEndpoints_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_list_firewall_endpoints_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient", + "shortName": "FirewallActivationAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient.update_firewall_endpoint_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.UpdateFirewallEndpointAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "UpdateFirewallEndpointAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateFirewallEndpointAssociationRequest" + }, + { + "name": "firewall_endpoint_association", + "type": "google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_firewall_endpoint_association" + }, + "description": "Sample for UpdateFirewallEndpointAssociation", + "file": "networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpointAssociation_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.network_security_v1alpha1.FirewallActivationClient", + "shortName": "FirewallActivationClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient.update_firewall_endpoint_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.UpdateFirewallEndpointAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "UpdateFirewallEndpointAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateFirewallEndpointAssociationRequest" + }, + { + "name": "firewall_endpoint_association", + "type": "google.cloud.network_security_v1alpha1.types.FirewallEndpointAssociation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_firewall_endpoint_association" + }, + "description": "Sample for UpdateFirewallEndpointAssociation", + "file": "networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpointAssociation_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient", + "shortName": "FirewallActivationAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationAsyncClient.update_firewall_endpoint", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.UpdateFirewallEndpoint", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "UpdateFirewallEndpoint" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateFirewallEndpointRequest" + }, + { + "name": "firewall_endpoint", + "type": "google.cloud.network_security_v1alpha1.types.FirewallEndpoint" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_firewall_endpoint" + }, + "description": "Sample for UpdateFirewallEndpoint", + "file": "networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpoint_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient", + "shortName": "FirewallActivationClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.FirewallActivationClient.update_firewall_endpoint", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation.UpdateFirewallEndpoint", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.FirewallActivation", + "shortName": "FirewallActivation" + }, + "shortName": "UpdateFirewallEndpoint" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateFirewallEndpointRequest" + }, + { + "name": "firewall_endpoint", + "type": "google.cloud.network_security_v1alpha1.types.FirewallEndpoint" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_firewall_endpoint" + }, + "description": "Sample for UpdateFirewallEndpoint", + "file": "networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_FirewallActivation_UpdateFirewallEndpoint_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_firewall_activation_update_firewall_endpoint_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.create_intercept_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.CreateInterceptDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "CreateInterceptDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateInterceptDeploymentGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intercept_deployment_group", + "type": 
"google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup" + }, + { + "name": "intercept_deployment_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_intercept_deployment_group" + }, + "description": "Sample for CreateInterceptDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeploymentGroup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.create_intercept_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.CreateInterceptDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "CreateInterceptDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateInterceptDeploymentGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intercept_deployment_group", + "type": "google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup" + }, + { + "name": "intercept_deployment_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_intercept_deployment_group" + }, + "description": "Sample for CreateInterceptDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeploymentGroup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": 
"InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.create_intercept_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.CreateInterceptDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "CreateInterceptDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateInterceptDeploymentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intercept_deployment", + "type": "google.cloud.network_security_v1alpha1.types.InterceptDeployment" + }, + { + "name": "intercept_deployment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_intercept_deployment" + }, + "description": "Sample for CreateInterceptDeployment", + "file": "networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeployment_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.create_intercept_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.CreateInterceptDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "CreateInterceptDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateInterceptDeploymentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intercept_deployment", + "type": "google.cloud.network_security_v1alpha1.types.InterceptDeployment" + }, + { + "name": "intercept_deployment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_intercept_deployment" + }, + "description": "Sample for CreateInterceptDeployment", + "file": "networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_CreateInterceptDeployment_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_create_intercept_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.create_intercept_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.CreateInterceptEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "CreateInterceptEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateInterceptEndpointGroupAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intercept_endpoint_group_association", + "type": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation" + }, + { + "name": "intercept_endpoint_group_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_intercept_endpoint_group_association" + }, + "description": "Sample for CreateInterceptEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroupAssociation_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.create_intercept_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.CreateInterceptEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "CreateInterceptEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateInterceptEndpointGroupAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intercept_endpoint_group_association", + "type": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation" + }, + { + "name": 
"intercept_endpoint_group_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_intercept_endpoint_group_association" + }, + "description": "Sample for CreateInterceptEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroupAssociation_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.create_intercept_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.CreateInterceptEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "CreateInterceptEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateInterceptEndpointGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intercept_endpoint_group", + "type": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup" + }, + { + "name": "intercept_endpoint_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_intercept_endpoint_group" + }, + "description": "Sample for CreateInterceptEndpointGroup", + "file": "networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": 
"google.cloud.network_security_v1alpha1.InterceptClient.create_intercept_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.CreateInterceptEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "CreateInterceptEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateInterceptEndpointGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intercept_endpoint_group", + "type": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup" + }, + { + "name": "intercept_endpoint_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_intercept_endpoint_group" + }, + "description": "Sample for CreateInterceptEndpointGroup", + "file": "networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_CreateInterceptEndpointGroup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_create_intercept_endpoint_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.delete_intercept_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.DeleteInterceptDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "DeleteInterceptDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteInterceptDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_intercept_deployment_group" + }, + "description": "Sample for DeleteInterceptDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeploymentGroup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.delete_intercept_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.DeleteInterceptDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "DeleteInterceptDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteInterceptDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_intercept_deployment_group" + }, + "description": "Sample for DeleteInterceptDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeploymentGroup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.delete_intercept_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.DeleteInterceptDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "DeleteInterceptDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteInterceptDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_intercept_deployment" + }, + "description": "Sample for DeleteInterceptDeployment", + "file": "networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeployment_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { 
+ "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.delete_intercept_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.DeleteInterceptDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "DeleteInterceptDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteInterceptDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_intercept_deployment" + }, + "description": "Sample for DeleteInterceptDeployment", + "file": "networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_DeleteInterceptDeployment_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_delete_intercept_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.delete_intercept_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.DeleteInterceptEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "DeleteInterceptEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteInterceptEndpointGroupAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_intercept_endpoint_group_association" + }, + "description": "Sample for DeleteInterceptEndpointGroupAssociation", + "file": 
"networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroupAssociation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.delete_intercept_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.DeleteInterceptEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "DeleteInterceptEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteInterceptEndpointGroupAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_intercept_endpoint_group_association" + }, + "description": "Sample for DeleteInterceptEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroupAssociation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.delete_intercept_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.DeleteInterceptEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "DeleteInterceptEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteInterceptEndpointGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_intercept_endpoint_group" + }, + "description": "Sample for DeleteInterceptEndpointGroup", + "file": "networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.delete_intercept_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.DeleteInterceptEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "DeleteInterceptEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteInterceptEndpointGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_intercept_endpoint_group" + }, + "description": "Sample for DeleteInterceptEndpointGroup", + "file": "networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_DeleteInterceptEndpointGroup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_delete_intercept_endpoint_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.get_intercept_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.GetInterceptDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "GetInterceptDeploymentGroup" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetInterceptDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup", + "shortName": "get_intercept_deployment_group" + }, + "description": "Sample for GetInterceptDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_GetInterceptDeploymentGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.get_intercept_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.GetInterceptDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "GetInterceptDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetInterceptDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup", + "shortName": "get_intercept_deployment_group" + }, + "description": "Sample for GetInterceptDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_GetInterceptDeploymentGroup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.get_intercept_deployment", + "method": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.Intercept.GetInterceptDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "GetInterceptDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetInterceptDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.InterceptDeployment", + "shortName": "get_intercept_deployment" + }, + "description": "Sample for GetInterceptDeployment", + "file": "networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_GetInterceptDeployment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.get_intercept_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.GetInterceptDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "GetInterceptDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetInterceptDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.InterceptDeployment", + "shortName": "get_intercept_deployment" + }, + "description": "Sample for GetInterceptDeployment", + "file": "networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_GetInterceptDeployment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_get_intercept_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": 
"InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.get_intercept_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.GetInterceptEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "GetInterceptEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetInterceptEndpointGroupAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation", + "shortName": "get_intercept_endpoint_group_association" + }, + "description": "Sample for GetInterceptEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroupAssociation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.get_intercept_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.GetInterceptEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "GetInterceptEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetInterceptEndpointGroupAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation", + "shortName": "get_intercept_endpoint_group_association" + }, + "description": "Sample for GetInterceptEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroupAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.get_intercept_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.GetInterceptEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "GetInterceptEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetInterceptEndpointGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup", + "shortName": "get_intercept_endpoint_group" + }, + "description": "Sample for GetInterceptEndpointGroup", + "file": "networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.get_intercept_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.GetInterceptEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "GetInterceptEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetInterceptEndpointGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup", + "shortName": "get_intercept_endpoint_group" + }, + "description": "Sample for GetInterceptEndpointGroup", + "file": "networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_GetInterceptEndpointGroup_sync", + "segments": [ + { 
+ "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_get_intercept_endpoint_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.list_intercept_deployment_groups", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.ListInterceptDeploymentGroups", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "ListInterceptDeploymentGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptDeploymentGroupsAsyncPager", + "shortName": "list_intercept_deployment_groups" + }, + "description": "Sample for ListInterceptDeploymentGroups", + "file": "networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_ListInterceptDeploymentGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.list_intercept_deployment_groups", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.ListInterceptDeploymentGroups", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "ListInterceptDeploymentGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptDeploymentGroupsPager", + "shortName": 
"list_intercept_deployment_groups" + }, + "description": "Sample for ListInterceptDeploymentGroups", + "file": "networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_ListInterceptDeploymentGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_list_intercept_deployment_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.list_intercept_deployments", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.ListInterceptDeployments", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "ListInterceptDeployments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptDeploymentsAsyncPager", + "shortName": "list_intercept_deployments" + }, + "description": "Sample for ListInterceptDeployments", + "file": "networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_ListInterceptDeployments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.list_intercept_deployments", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.ListInterceptDeployments", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "ListInterceptDeployments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListInterceptDeploymentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptDeploymentsPager", + "shortName": "list_intercept_deployments" + }, + "description": "Sample for ListInterceptDeployments", + "file": "networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_ListInterceptDeployments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_list_intercept_deployments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.list_intercept_endpoint_group_associations", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.ListInterceptEndpointGroupAssociations", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "ListInterceptEndpointGroupAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptEndpointGroupAssociationsAsyncPager", + "shortName": "list_intercept_endpoint_group_associations" + }, + "description": "Sample for ListInterceptEndpointGroupAssociations", + "file": "networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroupAssociations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.list_intercept_endpoint_group_associations", + "method": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.Intercept.ListInterceptEndpointGroupAssociations", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "ListInterceptEndpointGroupAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptEndpointGroupAssociationsPager", + "shortName": "list_intercept_endpoint_group_associations" + }, + "description": "Sample for ListInterceptEndpointGroupAssociations", + "file": "networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroupAssociations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_group_associations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.list_intercept_endpoint_groups", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.ListInterceptEndpointGroups", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "ListInterceptEndpointGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptEndpointGroupsAsyncPager", + "shortName": "list_intercept_endpoint_groups" + }, + "description": "Sample for ListInterceptEndpointGroups", + "file": "networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.list_intercept_endpoint_groups", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.ListInterceptEndpointGroups", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "ListInterceptEndpointGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListInterceptEndpointGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.intercept.pagers.ListInterceptEndpointGroupsPager", + "shortName": "list_intercept_endpoint_groups" + }, + "description": "Sample for ListInterceptEndpointGroups", + "file": "networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_ListInterceptEndpointGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_list_intercept_endpoint_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.update_intercept_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.UpdateInterceptDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "UpdateInterceptDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateInterceptDeploymentGroupRequest" + }, + { + "name": "intercept_deployment_group", + "type": "google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_intercept_deployment_group" + }, + "description": "Sample for UpdateInterceptDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeploymentGroup_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.update_intercept_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.UpdateInterceptDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "UpdateInterceptDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateInterceptDeploymentGroupRequest" + }, + { + "name": "intercept_deployment_group", + "type": "google.cloud.network_security_v1alpha1.types.InterceptDeploymentGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_intercept_deployment_group" + }, + "description": "Sample for UpdateInterceptDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeploymentGroup_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.update_intercept_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.UpdateInterceptDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "UpdateInterceptDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateInterceptDeploymentRequest" + }, + { + "name": "intercept_deployment", + "type": "google.cloud.network_security_v1alpha1.types.InterceptDeployment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" 
+ }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_intercept_deployment" + }, + "description": "Sample for UpdateInterceptDeployment", + "file": "networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeployment_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.update_intercept_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.UpdateInterceptDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "UpdateInterceptDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateInterceptDeploymentRequest" + }, + { + "name": "intercept_deployment", + "type": "google.cloud.network_security_v1alpha1.types.InterceptDeployment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_intercept_deployment" + }, + "description": "Sample for UpdateInterceptDeployment", + "file": "networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_UpdateInterceptDeployment_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_update_intercept_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.update_intercept_endpoint_group_association", + "method": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.Intercept.UpdateInterceptEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "UpdateInterceptEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateInterceptEndpointGroupAssociationRequest" + }, + { + "name": "intercept_endpoint_group_association", + "type": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_intercept_endpoint_group_association" + }, + "description": "Sample for UpdateInterceptEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroupAssociation_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.update_intercept_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.UpdateInterceptEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "UpdateInterceptEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateInterceptEndpointGroupAssociationRequest" + }, + { + "name": "intercept_endpoint_group_association", + "type": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroupAssociation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_intercept_endpoint_group_association" + }, + "description": "Sample for UpdateInterceptEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroupAssociation_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" 
+ }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient", + "shortName": "InterceptAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptAsyncClient.update_intercept_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.UpdateInterceptEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "UpdateInterceptEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateInterceptEndpointGroupRequest" + }, + { + "name": "intercept_endpoint_group", + "type": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_intercept_endpoint_group" + }, + "description": "Sample for UpdateInterceptEndpointGroup", + "file": "networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroup_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient", + "shortName": "InterceptClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.InterceptClient.update_intercept_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept.UpdateInterceptEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Intercept", + "shortName": "Intercept" + }, + "shortName": "UpdateInterceptEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateInterceptEndpointGroupRequest" + }, + { + "name": "intercept_endpoint_group", + "type": "google.cloud.network_security_v1alpha1.types.InterceptEndpointGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_intercept_endpoint_group" + }, + "description": "Sample for UpdateInterceptEndpointGroup", + "file": "networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Intercept_UpdateInterceptEndpointGroup_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_intercept_update_intercept_endpoint_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.create_mirroring_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.CreateMirroringDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "CreateMirroringDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateMirroringDeploymentGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mirroring_deployment_group", + "type": "google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup" + }, + { + "name": "mirroring_deployment_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_mirroring_deployment_group" + }, + "description": "Sample for CreateMirroringDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeploymentGroup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.create_mirroring_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.CreateMirroringDeploymentGroup", + "service": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "CreateMirroringDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateMirroringDeploymentGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mirroring_deployment_group", + "type": "google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup" + }, + { + "name": "mirroring_deployment_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_mirroring_deployment_group" + }, + "description": "Sample for CreateMirroringDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeploymentGroup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.create_mirroring_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.CreateMirroringDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "CreateMirroringDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateMirroringDeploymentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mirroring_deployment", + "type": "google.cloud.network_security_v1alpha1.types.MirroringDeployment" + }, + { + "name": "mirroring_deployment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_mirroring_deployment" + }, + "description": "Sample for CreateMirroringDeployment", + "file": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeployment_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.create_mirroring_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.CreateMirroringDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "CreateMirroringDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateMirroringDeploymentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mirroring_deployment", + "type": "google.cloud.network_security_v1alpha1.types.MirroringDeployment" + }, + { + "name": "mirroring_deployment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_mirroring_deployment" + }, + "description": "Sample for CreateMirroringDeployment", + "file": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_CreateMirroringDeployment_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.create_mirroring_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.CreateMirroringEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "CreateMirroringEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateMirroringEndpointGroupAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mirroring_endpoint_group_association", + "type": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation" + }, + { + "name": "mirroring_endpoint_group_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_mirroring_endpoint_group_association" + }, + 
"description": "Sample for CreateMirroringEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroupAssociation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.create_mirroring_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.CreateMirroringEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "CreateMirroringEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateMirroringEndpointGroupAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mirroring_endpoint_group_association", + "type": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation" + }, + { + "name": "mirroring_endpoint_group_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_mirroring_endpoint_group_association" + }, + "description": "Sample for CreateMirroringEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroupAssociation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.create_mirroring_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.CreateMirroringEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + 
"shortName": "Mirroring" + }, + "shortName": "CreateMirroringEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateMirroringEndpointGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mirroring_endpoint_group", + "type": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup" + }, + { + "name": "mirroring_endpoint_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_mirroring_endpoint_group" + }, + "description": "Sample for CreateMirroringEndpointGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroup_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.create_mirroring_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.CreateMirroringEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "CreateMirroringEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateMirroringEndpointGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mirroring_endpoint_group", + "type": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup" + }, + { + "name": "mirroring_endpoint_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_mirroring_endpoint_group" + }, + "description": "Sample for CreateMirroringEndpointGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_CreateMirroringEndpointGroup_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], 
+ "title": "networksecurity_v1alpha1_generated_mirroring_create_mirroring_endpoint_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.delete_mirroring_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.DeleteMirroringDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "DeleteMirroringDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteMirroringDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_mirroring_deployment_group" + }, + "description": "Sample for DeleteMirroringDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeploymentGroup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.delete_mirroring_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.DeleteMirroringDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "DeleteMirroringDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteMirroringDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_mirroring_deployment_group" + }, + "description": "Sample for DeleteMirroringDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeploymentGroup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.delete_mirroring_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.DeleteMirroringDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "DeleteMirroringDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteMirroringDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_mirroring_deployment" + }, + "description": "Sample for DeleteMirroringDeployment", + "file": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeployment_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.delete_mirroring_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.DeleteMirroringDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "DeleteMirroringDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteMirroringDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_mirroring_deployment" + }, + "description": "Sample for DeleteMirroringDeployment", + "file": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringDeployment_sync", + 
"segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.delete_mirroring_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.DeleteMirroringEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "DeleteMirroringEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteMirroringEndpointGroupAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_mirroring_endpoint_group_association" + }, + "description": "Sample for DeleteMirroringEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroupAssociation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.delete_mirroring_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.DeleteMirroringEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "DeleteMirroringEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteMirroringEndpointGroupAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + 
"shortName": "delete_mirroring_endpoint_group_association" + }, + "description": "Sample for DeleteMirroringEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroupAssociation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.delete_mirroring_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.DeleteMirroringEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "DeleteMirroringEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteMirroringEndpointGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_mirroring_endpoint_group" + }, + "description": "Sample for DeleteMirroringEndpointGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.delete_mirroring_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.DeleteMirroringEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "DeleteMirroringEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteMirroringEndpointGroupRequest" + }, + { + "name": "name", + "type": "str" 
+ }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_mirroring_endpoint_group" + }, + "description": "Sample for DeleteMirroringEndpointGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_DeleteMirroringEndpointGroup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_delete_mirroring_endpoint_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.get_mirroring_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.GetMirroringDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "GetMirroringDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetMirroringDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup", + "shortName": "get_mirroring_deployment_group" + }, + "description": "Sample for GetMirroringDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeploymentGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.get_mirroring_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.GetMirroringDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + 
"shortName": "GetMirroringDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetMirroringDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup", + "shortName": "get_mirroring_deployment_group" + }, + "description": "Sample for GetMirroringDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeploymentGroup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.get_mirroring_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.GetMirroringDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "GetMirroringDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetMirroringDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.MirroringDeployment", + "shortName": "get_mirroring_deployment" + }, + "description": "Sample for GetMirroringDeployment", + "file": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeployment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.get_mirroring_deployment", + "method": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.Mirroring.GetMirroringDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "GetMirroringDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetMirroringDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.MirroringDeployment", + "shortName": "get_mirroring_deployment" + }, + "description": "Sample for GetMirroringDeployment", + "file": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_GetMirroringDeployment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.get_mirroring_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.GetMirroringEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "GetMirroringEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetMirroringEndpointGroupAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation", + "shortName": "get_mirroring_endpoint_group_association" + }, + "description": "Sample for GetMirroringEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroupAssociation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.get_mirroring_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.GetMirroringEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "GetMirroringEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetMirroringEndpointGroupAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation", + "shortName": "get_mirroring_endpoint_group_association" + }, + "description": "Sample for GetMirroringEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroupAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.get_mirroring_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.GetMirroringEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "GetMirroringEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetMirroringEndpointGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup", + "shortName": "get_mirroring_endpoint_group" + }, + "description": "Sample for GetMirroringEndpointGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + 
}, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.get_mirroring_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.GetMirroringEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "GetMirroringEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetMirroringEndpointGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup", + "shortName": "get_mirroring_endpoint_group" + }, + "description": "Sample for GetMirroringEndpointGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_GetMirroringEndpointGroup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_get_mirroring_endpoint_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.list_mirroring_deployment_groups", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.ListMirroringDeploymentGroups", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "ListMirroringDeploymentGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringDeploymentGroupsAsyncPager", + "shortName": "list_mirroring_deployment_groups" + }, + "description": "Sample for ListMirroringDeploymentGroups", + "file": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeploymentGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.list_mirroring_deployment_groups", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.ListMirroringDeploymentGroups", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "ListMirroringDeploymentGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringDeploymentGroupsPager", + "shortName": "list_mirroring_deployment_groups" + }, + "description": "Sample for ListMirroringDeploymentGroups", + "file": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeploymentGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployment_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.list_mirroring_deployments", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.ListMirroringDeployments", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "ListMirroringDeployments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringDeploymentsAsyncPager", + "shortName": "list_mirroring_deployments" + }, + "description": "Sample for ListMirroringDeployments", + "file": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeployments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.list_mirroring_deployments", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.ListMirroringDeployments", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "ListMirroringDeployments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListMirroringDeploymentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringDeploymentsPager", + "shortName": "list_mirroring_deployments" + }, + "description": "Sample for ListMirroringDeployments", + "file": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_ListMirroringDeployments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_deployments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.list_mirroring_endpoint_group_associations", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.ListMirroringEndpointGroupAssociations", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "ListMirroringEndpointGroupAssociations" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringEndpointGroupAssociationsAsyncPager", + "shortName": "list_mirroring_endpoint_group_associations" + }, + "description": "Sample for ListMirroringEndpointGroupAssociations", + "file": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroupAssociations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.list_mirroring_endpoint_group_associations", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.ListMirroringEndpointGroupAssociations", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "ListMirroringEndpointGroupAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringEndpointGroupAssociationsPager", + "shortName": "list_mirroring_endpoint_group_associations" + }, + "description": "Sample for ListMirroringEndpointGroupAssociations", + "file": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroupAssociations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_group_associations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.list_mirroring_endpoint_groups", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.ListMirroringEndpointGroups", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "ListMirroringEndpointGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringEndpointGroupsAsyncPager", + "shortName": "list_mirroring_endpoint_groups" + }, + "description": "Sample for ListMirroringEndpointGroups", + "file": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.list_mirroring_endpoint_groups", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.ListMirroringEndpointGroups", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "ListMirroringEndpointGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListMirroringEndpointGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.mirroring.pagers.ListMirroringEndpointGroupsPager", + "shortName": "list_mirroring_endpoint_groups" + }, + "description": "Sample for ListMirroringEndpointGroups", + "file": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_ListMirroringEndpointGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + 
"start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_list_mirroring_endpoint_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.update_mirroring_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.UpdateMirroringDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "UpdateMirroringDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateMirroringDeploymentGroupRequest" + }, + { + "name": "mirroring_deployment_group", + "type": "google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_mirroring_deployment_group" + }, + "description": "Sample for UpdateMirroringDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeploymentGroup_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.update_mirroring_deployment_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.UpdateMirroringDeploymentGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "UpdateMirroringDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateMirroringDeploymentGroupRequest" + }, + { + "name": "mirroring_deployment_group", + "type": "google.cloud.network_security_v1alpha1.types.MirroringDeploymentGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_mirroring_deployment_group" + }, + 
"description": "Sample for UpdateMirroringDeploymentGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeploymentGroup_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.update_mirroring_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.UpdateMirroringDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "UpdateMirroringDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateMirroringDeploymentRequest" + }, + { + "name": "mirroring_deployment", + "type": "google.cloud.network_security_v1alpha1.types.MirroringDeployment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_mirroring_deployment" + }, + "description": "Sample for UpdateMirroringDeployment", + "file": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeployment_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.update_mirroring_deployment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.UpdateMirroringDeployment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "UpdateMirroringDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateMirroringDeploymentRequest" + }, + { + "name": 
"mirroring_deployment", + "type": "google.cloud.network_security_v1alpha1.types.MirroringDeployment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_mirroring_deployment" + }, + "description": "Sample for UpdateMirroringDeployment", + "file": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringDeployment_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.update_mirroring_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.UpdateMirroringEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "UpdateMirroringEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateMirroringEndpointGroupAssociationRequest" + }, + { + "name": "mirroring_endpoint_group_association", + "type": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_mirroring_endpoint_group_association" + }, + "description": "Sample for UpdateMirroringEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroupAssociation_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.update_mirroring_endpoint_group_association", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.UpdateMirroringEndpointGroupAssociation", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "UpdateMirroringEndpointGroupAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateMirroringEndpointGroupAssociationRequest" + }, + { + "name": "mirroring_endpoint_group_association", + "type": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroupAssociation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_mirroring_endpoint_group_association" + }, + "description": "Sample for UpdateMirroringEndpointGroupAssociation", + "file": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroupAssociation_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient", + "shortName": "MirroringAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringAsyncClient.update_mirroring_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.UpdateMirroringEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "UpdateMirroringEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateMirroringEndpointGroupRequest" + }, + { + "name": "mirroring_endpoint_group", + "type": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_mirroring_endpoint_group" + }, + "description": "Sample for UpdateMirroringEndpointGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroup_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient", + "shortName": "MirroringClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.MirroringClient.update_mirroring_endpoint_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring.UpdateMirroringEndpointGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.Mirroring", + "shortName": "Mirroring" + }, + "shortName": "UpdateMirroringEndpointGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateMirroringEndpointGroupRequest" + }, + { + "name": "mirroring_endpoint_group", + "type": "google.cloud.network_security_v1alpha1.types.MirroringEndpointGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_mirroring_endpoint_group" + }, + "description": "Sample for UpdateMirroringEndpointGroup", + "file": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_Mirroring_UpdateMirroringEndpointGroup_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_mirroring_update_mirroring_endpoint_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.create_authorization_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateAuthorizationPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateAuthorizationPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateAuthorizationPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "authorization_policy", + "type": 
"google.cloud.network_security_v1alpha1.types.AuthorizationPolicy" + }, + { + "name": "authorization_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_authorization_policy" + }, + "description": "Sample for CreateAuthorizationPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_authorization_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthorizationPolicy_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_authorization_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.create_authorization_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateAuthorizationPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateAuthorizationPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateAuthorizationPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "authorization_policy", + "type": "google.cloud.network_security_v1alpha1.types.AuthorizationPolicy" + }, + { + "name": "authorization_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_authorization_policy" + }, + "description": "Sample for CreateAuthorizationPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_authorization_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthorizationPolicy_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_authorization_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, 
+ "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.create_authz_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateAuthzPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateAuthzPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateAuthzPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "authz_policy", + "type": "google.cloud.network_security_v1alpha1.types.AuthzPolicy" + }, + { + "name": "authz_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_authz_policy" + }, + "description": "Sample for CreateAuthzPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_authz_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthzPolicy_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_authz_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.create_authz_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateAuthzPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateAuthzPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateAuthzPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "authz_policy", + "type": "google.cloud.network_security_v1alpha1.types.AuthzPolicy" + }, + { + "name": "authz_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_authz_policy" + }, + "description": "Sample for CreateAuthzPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_authz_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateAuthzPolicy_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_authz_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.create_backend_authentication_config", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateBackendAuthenticationConfig", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateBackendAuthenticationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateBackendAuthenticationConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backend_authentication_config", + "type": "google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig" + }, + { + "name": "backend_authentication_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backend_authentication_config" + }, + "description": "Sample for CreateBackendAuthenticationConfig", + "file": "networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateBackendAuthenticationConfig_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.create_backend_authentication_config", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateBackendAuthenticationConfig", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateBackendAuthenticationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateBackendAuthenticationConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backend_authentication_config", + "type": "google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig" + }, + { + "name": "backend_authentication_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, 
+ { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backend_authentication_config" + }, + "description": "Sample for CreateBackendAuthenticationConfig", + "file": "networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateBackendAuthenticationConfig_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_backend_authentication_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.create_client_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateClientTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateClientTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateClientTlsPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "client_tls_policy", + "type": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy" + }, + { + "name": "client_tls_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_client_tls_policy" + }, + "description": "Sample for CreateClientTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_client_tls_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateClientTlsPolicy_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_client_tls_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.create_client_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateClientTlsPolicy", + "service": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateClientTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateClientTlsPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "client_tls_policy", + "type": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy" + }, + { + "name": "client_tls_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_client_tls_policy" + }, + "description": "Sample for CreateClientTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_client_tls_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateClientTlsPolicy_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_client_tls_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.create_gateway_security_policy_rule", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateGatewaySecurityPolicyRule", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateGatewaySecurityPolicyRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateGatewaySecurityPolicyRuleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "gateway_security_policy_rule", + "type": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule" + }, + { + "name": "gateway_security_policy_rule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_gateway_security_policy_rule" + }, + "description": "Sample for CreateGatewaySecurityPolicyRule", + "file": "networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicyRule_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.create_gateway_security_policy_rule", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateGatewaySecurityPolicyRule", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateGatewaySecurityPolicyRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateGatewaySecurityPolicyRuleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "gateway_security_policy_rule", + "type": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule" + }, + { + "name": "gateway_security_policy_rule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_gateway_security_policy_rule" + }, + "description": "Sample for CreateGatewaySecurityPolicyRule", + "file": "networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicyRule_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.create_gateway_security_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateGatewaySecurityPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateGatewaySecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateGatewaySecurityPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "gateway_security_policy", + "type": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy" + }, + { + "name": "gateway_security_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_gateway_security_policy" + }, + "description": "Sample for CreateGatewaySecurityPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicy_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.create_gateway_security_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateGatewaySecurityPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateGatewaySecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateGatewaySecurityPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "gateway_security_policy", + "type": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy" + }, + { + "name": "gateway_security_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_gateway_security_policy" + }, + "description": "Sample for CreateGatewaySecurityPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateGatewaySecurityPolicy_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_gateway_security_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.create_server_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateServerTlsPolicy", + "service": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateServerTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateServerTlsPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "server_tls_policy", + "type": "google.cloud.network_security_v1alpha1.types.ServerTlsPolicy" + }, + { + "name": "server_tls_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_server_tls_policy" + }, + "description": "Sample for CreateServerTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateServerTlsPolicy_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.create_server_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateServerTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateServerTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateServerTlsPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "server_tls_policy", + "type": "google.cloud.network_security_v1alpha1.types.ServerTlsPolicy" + }, + { + "name": "server_tls_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_server_tls_policy" + }, + "description": "Sample for CreateServerTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateServerTlsPolicy_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"networksecurity_v1alpha1_generated_network_security_create_server_tls_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.create_tls_inspection_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateTlsInspectionPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateTlsInspectionPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateTlsInspectionPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tls_inspection_policy", + "type": "google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy" + }, + { + "name": "tls_inspection_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_tls_inspection_policy" + }, + "description": "Sample for CreateTlsInspectionPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateTlsInspectionPolicy_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.create_tls_inspection_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateTlsInspectionPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateTlsInspectionPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateTlsInspectionPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tls_inspection_policy", + "type": "google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy" + }, + { + "name": "tls_inspection_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_tls_inspection_policy" + }, + "description": "Sample for CreateTlsInspectionPolicy", + "file": 
"networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateTlsInspectionPolicy_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_tls_inspection_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.create_url_list", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateUrlList", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateUrlList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateUrlListRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "url_list", + "type": "google.cloud.network_security_v1alpha1.types.UrlList" + }, + { + "name": "url_list_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_url_list" + }, + "description": "Sample for CreateUrlList", + "file": "networksecurity_v1alpha1_generated_network_security_create_url_list_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateUrlList_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_url_list_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.create_url_list", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.CreateUrlList", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "CreateUrlList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateUrlListRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "url_list", + "type": "google.cloud.network_security_v1alpha1.types.UrlList" + }, + { + "name": 
"url_list_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_url_list" + }, + "description": "Sample for CreateUrlList", + "file": "networksecurity_v1alpha1_generated_network_security_create_url_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_CreateUrlList_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_create_url_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.delete_authorization_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteAuthorizationPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteAuthorizationPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteAuthorizationPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_authorization_policy" + }, + "description": "Sample for DeleteAuthorizationPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthorizationPolicy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.delete_authorization_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteAuthorizationPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" 
+ }, + "shortName": "DeleteAuthorizationPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteAuthorizationPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_authorization_policy" + }, + "description": "Sample for DeleteAuthorizationPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthorizationPolicy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_authorization_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.delete_authz_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteAuthzPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteAuthzPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteAuthzPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_authz_policy" + }, + "description": "Sample for DeleteAuthzPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_authz_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthzPolicy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_authz_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.delete_authz_policy", + "method": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteAuthzPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteAuthzPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteAuthzPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_authz_policy" + }, + "description": "Sample for DeleteAuthzPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_authz_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteAuthzPolicy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_authz_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.delete_backend_authentication_config", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteBackendAuthenticationConfig", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteBackendAuthenticationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteBackendAuthenticationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backend_authentication_config" + }, + "description": "Sample for DeleteBackendAuthenticationConfig", + "file": "networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteBackendAuthenticationConfig_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_async.py" + }, + { + "canonical": true, + "clientMethod": { 
+ "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.delete_backend_authentication_config", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteBackendAuthenticationConfig", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteBackendAuthenticationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteBackendAuthenticationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backend_authentication_config" + }, + "description": "Sample for DeleteBackendAuthenticationConfig", + "file": "networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteBackendAuthenticationConfig_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_backend_authentication_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.delete_client_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteClientTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteClientTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteClientTlsPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_client_tls_policy" + }, + "description": "Sample for DeleteClientTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_client_tls_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteClientTlsPolicy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_client_tls_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.delete_client_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteClientTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteClientTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteClientTlsPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_client_tls_policy" + }, + "description": "Sample for DeleteClientTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_client_tls_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteClientTlsPolicy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_client_tls_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.delete_gateway_security_policy_rule", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteGatewaySecurityPolicyRule", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteGatewaySecurityPolicyRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteGatewaySecurityPolicyRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_gateway_security_policy_rule" + }, + "description": "Sample for DeleteGatewaySecurityPolicyRule", + "file": "networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicyRule_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.delete_gateway_security_policy_rule", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteGatewaySecurityPolicyRule", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteGatewaySecurityPolicyRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteGatewaySecurityPolicyRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_gateway_security_policy_rule" + }, + "description": "Sample for DeleteGatewaySecurityPolicyRule", + "file": "networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicyRule_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.delete_gateway_security_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteGatewaySecurityPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteGatewaySecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteGatewaySecurityPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + 
"resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_gateway_security_policy" + }, + "description": "Sample for DeleteGatewaySecurityPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.delete_gateway_security_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteGatewaySecurityPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteGatewaySecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteGatewaySecurityPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_gateway_security_policy" + }, + "description": "Sample for DeleteGatewaySecurityPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteGatewaySecurityPolicy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_gateway_security_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.delete_server_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteServerTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteServerTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.network_security_v1alpha1.types.DeleteServerTlsPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_server_tls_policy" + }, + "description": "Sample for DeleteServerTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteServerTlsPolicy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.delete_server_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteServerTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteServerTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteServerTlsPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_server_tls_policy" + }, + "description": "Sample for DeleteServerTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteServerTlsPolicy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_server_tls_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.delete_tls_inspection_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteTlsInspectionPolicy", + "service": { + 
"fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteTlsInspectionPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteTlsInspectionPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_tls_inspection_policy" + }, + "description": "Sample for DeleteTlsInspectionPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteTlsInspectionPolicy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.delete_tls_inspection_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteTlsInspectionPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteTlsInspectionPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteTlsInspectionPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_tls_inspection_policy" + }, + "description": "Sample for DeleteTlsInspectionPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteTlsInspectionPolicy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_tls_inspection_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": 
"NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.delete_url_list", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteUrlList", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteUrlList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteUrlListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_url_list" + }, + "description": "Sample for DeleteUrlList", + "file": "networksecurity_v1alpha1_generated_network_security_delete_url_list_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteUrlList_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_url_list_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.delete_url_list", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.DeleteUrlList", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "DeleteUrlList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteUrlListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_url_list" + }, + "description": "Sample for DeleteUrlList", + "file": "networksecurity_v1alpha1_generated_network_security_delete_url_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_DeleteUrlList_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_delete_url_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.get_authorization_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetAuthorizationPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetAuthorizationPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetAuthorizationPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.AuthorizationPolicy", + "shortName": "get_authorization_policy" + }, + "description": "Sample for GetAuthorizationPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_authorization_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthorizationPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_authorization_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.get_authorization_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetAuthorizationPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetAuthorizationPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetAuthorizationPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.AuthorizationPolicy", + "shortName": "get_authorization_policy" + }, + "description": "Sample for GetAuthorizationPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_authorization_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthorizationPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 
52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_authorization_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.get_authz_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetAuthzPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetAuthzPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetAuthzPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.AuthzPolicy", + "shortName": "get_authz_policy" + }, + "description": "Sample for GetAuthzPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_authz_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthzPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_authz_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.get_authz_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetAuthzPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetAuthzPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetAuthzPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.AuthzPolicy", + "shortName": "get_authz_policy" + }, + "description": "Sample for GetAuthzPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_authz_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetAuthzPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_authz_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.get_backend_authentication_config", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetBackendAuthenticationConfig", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetBackendAuthenticationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetBackendAuthenticationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig", + "shortName": "get_backend_authentication_config" + }, + "description": "Sample for GetBackendAuthenticationConfig", + "file": "networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetBackendAuthenticationConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.get_backend_authentication_config", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetBackendAuthenticationConfig", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetBackendAuthenticationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetBackendAuthenticationConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig", + "shortName": "get_backend_authentication_config" + }, + "description": "Sample for GetBackendAuthenticationConfig", + "file": 
"networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetBackendAuthenticationConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_backend_authentication_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.get_client_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetClientTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetClientTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetClientTlsPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy", + "shortName": "get_client_tls_policy" + }, + "description": "Sample for GetClientTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_client_tls_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetClientTlsPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_client_tls_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.get_client_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetClientTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetClientTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetClientTlsPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy", + "shortName": "get_client_tls_policy" + }, + "description": "Sample for GetClientTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_client_tls_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetClientTlsPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_client_tls_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.get_gateway_security_policy_rule", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetGatewaySecurityPolicyRule", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetGatewaySecurityPolicyRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetGatewaySecurityPolicyRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule", + "shortName": "get_gateway_security_policy_rule" + }, + "description": "Sample for GetGatewaySecurityPolicyRule", + "file": "networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicyRule_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.get_gateway_security_policy_rule", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetGatewaySecurityPolicyRule", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetGatewaySecurityPolicyRule" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.network_security_v1alpha1.types.GetGatewaySecurityPolicyRuleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule", + "shortName": "get_gateway_security_policy_rule" + }, + "description": "Sample for GetGatewaySecurityPolicyRule", + "file": "networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicyRule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.get_gateway_security_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetGatewaySecurityPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetGatewaySecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetGatewaySecurityPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy", + "shortName": "get_gateway_security_policy" + }, + "description": "Sample for GetGatewaySecurityPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": 
"google.cloud.network_security_v1alpha1.NetworkSecurityClient.get_gateway_security_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetGatewaySecurityPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetGatewaySecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetGatewaySecurityPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy", + "shortName": "get_gateway_security_policy" + }, + "description": "Sample for GetGatewaySecurityPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetGatewaySecurityPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_gateway_security_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.get_server_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetServerTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetServerTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetServerTlsPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.ServerTlsPolicy", + "shortName": "get_server_tls_policy" + }, + "description": "Sample for GetServerTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetServerTlsPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.get_server_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetServerTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetServerTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetServerTlsPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.ServerTlsPolicy", + "shortName": "get_server_tls_policy" + }, + "description": "Sample for GetServerTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetServerTlsPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_server_tls_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.get_tls_inspection_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetTlsInspectionPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetTlsInspectionPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetTlsInspectionPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy", + "shortName": "get_tls_inspection_policy" + }, + "description": "Sample for GetTlsInspectionPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetTlsInspectionPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.get_tls_inspection_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetTlsInspectionPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetTlsInspectionPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetTlsInspectionPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy", + "shortName": "get_tls_inspection_policy" + }, + "description": "Sample for GetTlsInspectionPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetTlsInspectionPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_tls_inspection_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.get_url_list", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetUrlList", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetUrlList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetUrlListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.UrlList", + "shortName": "get_url_list" + }, + "description": "Sample for GetUrlList", + "file": "networksecurity_v1alpha1_generated_network_security_get_url_list_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetUrlList_async", + "segments": [ + { + "end": 51, + "start": 27, + 
"type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_url_list_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.get_url_list", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.GetUrlList", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "GetUrlList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetUrlListRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.UrlList", + "shortName": "get_url_list" + }, + "description": "Sample for GetUrlList", + "file": "networksecurity_v1alpha1_generated_network_security_get_url_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_GetUrlList_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_get_url_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.list_authorization_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListAuthorizationPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListAuthorizationPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListAuthorizationPoliciesAsyncPager", + "shortName": "list_authorization_policies" + }, + "description": "Sample for ListAuthorizationPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_authorization_policies_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthorizationPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_authorization_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.list_authorization_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListAuthorizationPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListAuthorizationPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListAuthorizationPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListAuthorizationPoliciesPager", + "shortName": "list_authorization_policies" + }, + "description": "Sample for ListAuthorizationPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_authorization_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthorizationPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_authorization_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.list_authz_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListAuthzPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListAuthzPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + 
} + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListAuthzPoliciesAsyncPager", + "shortName": "list_authz_policies" + }, + "description": "Sample for ListAuthzPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_authz_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthzPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_authz_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.list_authz_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListAuthzPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListAuthzPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListAuthzPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListAuthzPoliciesPager", + "shortName": "list_authz_policies" + }, + "description": "Sample for ListAuthzPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_authz_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListAuthzPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_authz_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.list_backend_authentication_configs", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListBackendAuthenticationConfigs", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListBackendAuthenticationConfigs" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListBackendAuthenticationConfigsAsyncPager", + "shortName": "list_backend_authentication_configs" + }, + "description": "Sample for ListBackendAuthenticationConfigs", + "file": "networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListBackendAuthenticationConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.list_backend_authentication_configs", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListBackendAuthenticationConfigs", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListBackendAuthenticationConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListBackendAuthenticationConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListBackendAuthenticationConfigsPager", + "shortName": "list_backend_authentication_configs" + }, + "description": "Sample for ListBackendAuthenticationConfigs", + "file": "networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListBackendAuthenticationConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_backend_authentication_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.list_client_tls_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListClientTlsPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListClientTlsPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListClientTlsPoliciesAsyncPager", + "shortName": "list_client_tls_policies" + }, + "description": "Sample for ListClientTlsPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_client_tls_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListClientTlsPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_client_tls_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.list_client_tls_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListClientTlsPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListClientTlsPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListClientTlsPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListClientTlsPoliciesPager", + "shortName": "list_client_tls_policies" + }, + "description": "Sample for ListClientTlsPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_client_tls_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListClientTlsPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 
48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_client_tls_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.list_gateway_security_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListGatewaySecurityPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListGatewaySecurityPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListGatewaySecurityPoliciesAsyncPager", + "shortName": "list_gateway_security_policies" + }, + "description": "Sample for ListGatewaySecurityPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.list_gateway_security_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListGatewaySecurityPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListGatewaySecurityPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListGatewaySecurityPoliciesPager", + "shortName": "list_gateway_security_policies" + }, + "description": "Sample for ListGatewaySecurityPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_gateway_security_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.list_gateway_security_policy_rules", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListGatewaySecurityPolicyRules", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListGatewaySecurityPolicyRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListGatewaySecurityPolicyRulesAsyncPager", + "shortName": "list_gateway_security_policy_rules" + }, + "description": "Sample for ListGatewaySecurityPolicyRules", + "file": "networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicyRules_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.list_gateway_security_policy_rules", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListGatewaySecurityPolicyRules", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListGatewaySecurityPolicyRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListGatewaySecurityPolicyRulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListGatewaySecurityPolicyRulesPager", + "shortName": "list_gateway_security_policy_rules" + }, + "description": "Sample for ListGatewaySecurityPolicyRules", + "file": "networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListGatewaySecurityPolicyRules_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_gateway_security_policy_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.list_server_tls_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListServerTlsPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListServerTlsPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListServerTlsPoliciesAsyncPager", + "shortName": "list_server_tls_policies" + }, + "description": "Sample for ListServerTlsPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListServerTlsPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.list_server_tls_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListServerTlsPolicies", + 
"service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListServerTlsPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListServerTlsPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListServerTlsPoliciesPager", + "shortName": "list_server_tls_policies" + }, + "description": "Sample for ListServerTlsPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListServerTlsPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_server_tls_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.list_tls_inspection_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListTlsInspectionPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListTlsInspectionPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListTlsInspectionPoliciesAsyncPager", + "shortName": "list_tls_inspection_policies" + }, + "description": "Sample for ListTlsInspectionPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListTlsInspectionPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.list_tls_inspection_policies", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListTlsInspectionPolicies", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListTlsInspectionPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListTlsInspectionPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListTlsInspectionPoliciesPager", + "shortName": "list_tls_inspection_policies" + }, + "description": "Sample for ListTlsInspectionPolicies", + "file": "networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListTlsInspectionPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_tls_inspection_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.list_url_lists", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListUrlLists", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListUrlLists" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListUrlListsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListUrlListsAsyncPager", + "shortName": "list_url_lists" + }, + "description": "Sample for ListUrlLists", + "file": "networksecurity_v1alpha1_generated_network_security_list_url_lists_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListUrlLists_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" 
+ }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_url_lists_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.list_url_lists", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.ListUrlLists", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "ListUrlLists" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListUrlListsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.network_security.pagers.ListUrlListsPager", + "shortName": "list_url_lists" + }, + "description": "Sample for ListUrlLists", + "file": "networksecurity_v1alpha1_generated_network_security_list_url_lists_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_ListUrlLists_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_list_url_lists_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.update_authorization_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateAuthorizationPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateAuthorizationPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateAuthorizationPolicyRequest" + }, + { + "name": "authorization_policy", + "type": "google.cloud.network_security_v1alpha1.types.AuthorizationPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_authorization_policy" + }, + "description": "Sample for UpdateAuthorizationPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_authorization_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthorizationPolicy_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_authorization_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.update_authorization_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateAuthorizationPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateAuthorizationPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateAuthorizationPolicyRequest" + }, + { + "name": "authorization_policy", + "type": "google.cloud.network_security_v1alpha1.types.AuthorizationPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_authorization_policy" + }, + "description": "Sample for UpdateAuthorizationPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_authorization_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthorizationPolicy_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_authorization_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.update_authz_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateAuthzPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateAuthzPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateAuthzPolicyRequest" + }, + { + "name": "authz_policy", + "type": "google.cloud.network_security_v1alpha1.types.AuthzPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_authz_policy" + }, + "description": "Sample for UpdateAuthzPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_authz_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthzPolicy_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_authz_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.update_authz_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateAuthzPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateAuthzPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateAuthzPolicyRequest" + }, + { + "name": "authz_policy", + "type": "google.cloud.network_security_v1alpha1.types.AuthzPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_authz_policy" + }, + "description": "Sample for UpdateAuthzPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_authz_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateAuthzPolicy_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_authz_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.update_backend_authentication_config", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateBackendAuthenticationConfig", + "service": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateBackendAuthenticationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateBackendAuthenticationConfigRequest" + }, + { + "name": "backend_authentication_config", + "type": "google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_backend_authentication_config" + }, + "description": "Sample for UpdateBackendAuthenticationConfig", + "file": "networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateBackendAuthenticationConfig_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.update_backend_authentication_config", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateBackendAuthenticationConfig", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateBackendAuthenticationConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateBackendAuthenticationConfigRequest" + }, + { + "name": "backend_authentication_config", + "type": "google.cloud.network_security_v1alpha1.types.BackendAuthenticationConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_backend_authentication_config" + }, + "description": "Sample for UpdateBackendAuthenticationConfig", + "file": "networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateBackendAuthenticationConfig_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 
41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_backend_authentication_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.update_client_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateClientTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateClientTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateClientTlsPolicyRequest" + }, + { + "name": "client_tls_policy", + "type": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_client_tls_policy" + }, + "description": "Sample for UpdateClientTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_client_tls_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateClientTlsPolicy_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_client_tls_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.update_client_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateClientTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateClientTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateClientTlsPolicyRequest" + }, + { + "name": "client_tls_policy", + "type": "google.cloud.network_security_v1alpha1.types.ClientTlsPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_client_tls_policy" + }, + 
"description": "Sample for UpdateClientTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_client_tls_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateClientTlsPolicy_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_client_tls_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.update_gateway_security_policy_rule", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateGatewaySecurityPolicyRule", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateGatewaySecurityPolicyRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateGatewaySecurityPolicyRuleRequest" + }, + { + "name": "gateway_security_policy_rule", + "type": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_gateway_security_policy_rule" + }, + "description": "Sample for UpdateGatewaySecurityPolicyRule", + "file": "networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicyRule_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.update_gateway_security_policy_rule", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateGatewaySecurityPolicyRule", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": 
"UpdateGatewaySecurityPolicyRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateGatewaySecurityPolicyRuleRequest" + }, + { + "name": "gateway_security_policy_rule", + "type": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicyRule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_gateway_security_policy_rule" + }, + "description": "Sample for UpdateGatewaySecurityPolicyRule", + "file": "networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicyRule_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.update_gateway_security_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateGatewaySecurityPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateGatewaySecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateGatewaySecurityPolicyRequest" + }, + { + "name": "gateway_security_policy", + "type": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_gateway_security_policy" + }, + "description": "Sample for UpdateGatewaySecurityPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicy_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.update_gateway_security_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateGatewaySecurityPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateGatewaySecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateGatewaySecurityPolicyRequest" + }, + { + "name": "gateway_security_policy", + "type": "google.cloud.network_security_v1alpha1.types.GatewaySecurityPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_gateway_security_policy" + }, + "description": "Sample for UpdateGatewaySecurityPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateGatewaySecurityPolicy_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_gateway_security_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.update_server_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateServerTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateServerTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateServerTlsPolicyRequest" + }, + { + "name": "server_tls_policy", + "type": "google.cloud.network_security_v1alpha1.types.ServerTlsPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_server_tls_policy" + }, + "description": "Sample for UpdateServerTlsPolicy", + "file": 
"networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateServerTlsPolicy_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.update_server_tls_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateServerTlsPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateServerTlsPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateServerTlsPolicyRequest" + }, + { + "name": "server_tls_policy", + "type": "google.cloud.network_security_v1alpha1.types.ServerTlsPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_server_tls_policy" + }, + "description": "Sample for UpdateServerTlsPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateServerTlsPolicy_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_server_tls_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": "NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.update_tls_inspection_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateTlsInspectionPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateTlsInspectionPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateTlsInspectionPolicyRequest" + }, + { + "name": "tls_inspection_policy", + "type": 
"google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_tls_inspection_policy" + }, + "description": "Sample for UpdateTlsInspectionPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateTlsInspectionPolicy_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.update_tls_inspection_policy", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateTlsInspectionPolicy", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateTlsInspectionPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateTlsInspectionPolicyRequest" + }, + { + "name": "tls_inspection_policy", + "type": "google.cloud.network_security_v1alpha1.types.TlsInspectionPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_tls_inspection_policy" + }, + "description": "Sample for UpdateTlsInspectionPolicy", + "file": "networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateTlsInspectionPolicy_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_tls_inspection_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient", + "shortName": 
"NetworkSecurityAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityAsyncClient.update_url_list", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateUrlList", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateUrlList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateUrlListRequest" + }, + { + "name": "url_list", + "type": "google.cloud.network_security_v1alpha1.types.UrlList" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_url_list" + }, + "description": "Sample for UpdateUrlList", + "file": "networksecurity_v1alpha1_generated_network_security_update_url_list_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateUrlList_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_url_list_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient", + "shortName": "NetworkSecurityClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.NetworkSecurityClient.update_url_list", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity.UpdateUrlList", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.NetworkSecurity", + "shortName": "NetworkSecurity" + }, + "shortName": "UpdateUrlList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateUrlListRequest" + }, + { + "name": "url_list", + "type": "google.cloud.network_security_v1alpha1.types.UrlList" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_url_list" + }, + "description": "Sample for UpdateUrlList", + "file": "networksecurity_v1alpha1_generated_network_security_update_url_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_NetworkSecurity_UpdateUrlList_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, 
+ "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_network_security_update_url_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient", + "shortName": "OrganizationSecurityProfileGroupServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.create_security_profile_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.CreateSecurityProfileGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "CreateSecurityProfileGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateSecurityProfileGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "security_profile_group", + "type": "google.cloud.network_security_v1alpha1.types.SecurityProfileGroup" + }, + { + "name": "security_profile_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_security_profile_group" + }, + "description": "Sample for CreateSecurityProfileGroup", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfileGroup_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient", + "shortName": "OrganizationSecurityProfileGroupServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient.create_security_profile_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.CreateSecurityProfileGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "CreateSecurityProfileGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateSecurityProfileGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "security_profile_group", + "type": "google.cloud.network_security_v1alpha1.types.SecurityProfileGroup" + }, + { + "name": 
"security_profile_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_security_profile_group" + }, + "description": "Sample for CreateSecurityProfileGroup", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfileGroup_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient", + "shortName": "OrganizationSecurityProfileGroupServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.create_security_profile", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.CreateSecurityProfile", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "CreateSecurityProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateSecurityProfileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "security_profile", + "type": "google.cloud.network_security_v1alpha1.types.SecurityProfile" + }, + { + "name": "security_profile_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_security_profile" + }, + "description": "Sample for CreateSecurityProfile", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfile_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_async.py" + }, 
+ { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient", + "shortName": "OrganizationSecurityProfileGroupServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient.create_security_profile", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.CreateSecurityProfile", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "CreateSecurityProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateSecurityProfileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "security_profile", + "type": "google.cloud.network_security_v1alpha1.types.SecurityProfile" + }, + { + "name": "security_profile_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_security_profile" + }, + "description": "Sample for CreateSecurityProfile", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_CreateSecurityProfile_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_create_security_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient", + "shortName": "OrganizationSecurityProfileGroupServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.delete_security_profile_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.DeleteSecurityProfileGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "DeleteSecurityProfileGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteSecurityProfileGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_security_profile_group" + }, + "description": "Sample for DeleteSecurityProfileGroup", + 
"file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfileGroup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient", + "shortName": "OrganizationSecurityProfileGroupServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient.delete_security_profile_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.DeleteSecurityProfileGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "DeleteSecurityProfileGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteSecurityProfileGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_security_profile_group" + }, + "description": "Sample for DeleteSecurityProfileGroup", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfileGroup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient", + "shortName": "OrganizationSecurityProfileGroupServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.delete_security_profile", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.DeleteSecurityProfile", + "service": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "DeleteSecurityProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteSecurityProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_security_profile" + }, + "description": "Sample for DeleteSecurityProfile", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfile_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient", + "shortName": "OrganizationSecurityProfileGroupServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient.delete_security_profile", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.DeleteSecurityProfile", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "DeleteSecurityProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteSecurityProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_security_profile" + }, + "description": "Sample for DeleteSecurityProfile", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_DeleteSecurityProfile_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"networksecurity_v1alpha1_generated_organization_security_profile_group_service_delete_security_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient", + "shortName": "OrganizationSecurityProfileGroupServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.get_security_profile_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.GetSecurityProfileGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "GetSecurityProfileGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetSecurityProfileGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.SecurityProfileGroup", + "shortName": "get_security_profile_group" + }, + "description": "Sample for GetSecurityProfileGroup", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfileGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient", + "shortName": "OrganizationSecurityProfileGroupServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient.get_security_profile_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.GetSecurityProfileGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "GetSecurityProfileGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetSecurityProfileGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.SecurityProfileGroup", + "shortName": "get_security_profile_group" + }, + "description": "Sample for GetSecurityProfileGroup", 
+ "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfileGroup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient", + "shortName": "OrganizationSecurityProfileGroupServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.get_security_profile", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.GetSecurityProfile", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "GetSecurityProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetSecurityProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.SecurityProfile", + "shortName": "get_security_profile" + }, + "description": "Sample for GetSecurityProfile", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient", + "shortName": "OrganizationSecurityProfileGroupServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient.get_security_profile", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.GetSecurityProfile", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": 
"OrganizationSecurityProfileGroupService" + }, + "shortName": "GetSecurityProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetSecurityProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.SecurityProfile", + "shortName": "get_security_profile" + }, + "description": "Sample for GetSecurityProfile", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_GetSecurityProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_get_security_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient", + "shortName": "OrganizationSecurityProfileGroupServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.list_security_profile_groups", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.ListSecurityProfileGroups", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "ListSecurityProfileGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.pagers.ListSecurityProfileGroupsAsyncPager", + "shortName": "list_security_profile_groups" + }, + "description": "Sample for ListSecurityProfileGroups", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfileGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient", + "shortName": "OrganizationSecurityProfileGroupServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient.list_security_profile_groups", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.ListSecurityProfileGroups", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "ListSecurityProfileGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListSecurityProfileGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.pagers.ListSecurityProfileGroupsPager", + "shortName": "list_security_profile_groups" + }, + "description": "Sample for ListSecurityProfileGroups", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfileGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profile_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient", + "shortName": "OrganizationSecurityProfileGroupServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.list_security_profiles", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.ListSecurityProfiles", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "ListSecurityProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListSecurityProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.pagers.ListSecurityProfilesAsyncPager", + "shortName": "list_security_profiles" + }, + "description": "Sample for ListSecurityProfiles", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfiles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient", + "shortName": "OrganizationSecurityProfileGroupServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient.list_security_profiles", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.ListSecurityProfiles", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "ListSecurityProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListSecurityProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.pagers.ListSecurityProfilesPager", + "shortName": "list_security_profiles" + }, + "description": "Sample for ListSecurityProfiles", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_ListSecurityProfiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_list_security_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient", + "shortName": "OrganizationSecurityProfileGroupServiceAsyncClient" + }, + "fullName": 
"google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.update_security_profile_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.UpdateSecurityProfileGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "UpdateSecurityProfileGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateSecurityProfileGroupRequest" + }, + { + "name": "security_profile_group", + "type": "google.cloud.network_security_v1alpha1.types.SecurityProfileGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_security_profile_group" + }, + "description": "Sample for UpdateSecurityProfileGroup", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfileGroup_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient", + "shortName": "OrganizationSecurityProfileGroupServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient.update_security_profile_group", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.UpdateSecurityProfileGroup", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "UpdateSecurityProfileGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateSecurityProfileGroupRequest" + }, + { + "name": "security_profile_group", + "type": "google.cloud.network_security_v1alpha1.types.SecurityProfileGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_security_profile_group" + }, + "description": "Sample for UpdateSecurityProfileGroup", + "file": 
"networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfileGroup_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient", + "shortName": "OrganizationSecurityProfileGroupServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceAsyncClient.update_security_profile", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.UpdateSecurityProfile", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "UpdateSecurityProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateSecurityProfileRequest" + }, + { + "name": "security_profile", + "type": "google.cloud.network_security_v1alpha1.types.SecurityProfile" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_security_profile" + }, + "description": "Sample for UpdateSecurityProfile", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfile_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient", + "shortName": "OrganizationSecurityProfileGroupServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.OrganizationSecurityProfileGroupServiceClient.update_security_profile", + "method": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService.UpdateSecurityProfile", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.OrganizationSecurityProfileGroupService", + "shortName": "OrganizationSecurityProfileGroupService" + }, + "shortName": "UpdateSecurityProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdateSecurityProfileRequest" + }, + { + "name": "security_profile", + "type": "google.cloud.network_security_v1alpha1.types.SecurityProfile" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_security_profile" + }, + "description": "Sample for UpdateSecurityProfile", + "file": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_OrganizationSecurityProfileGroupService_UpdateSecurityProfile_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_organization_security_profile_group_service_update_security_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient", + "shortName": "SSEGatewayServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient.create_partner_sse_gateway", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.CreatePartnerSSEGateway", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "CreatePartnerSSEGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreatePartnerSSEGatewayRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "partner_sse_gateway", + "type": "google.cloud.network_security_v1alpha1.types.PartnerSSEGateway" + }, + { + "name": "partner_sse_gateway_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_partner_sse_gateway" + }, + "description": "Sample for CreatePartnerSSEGateway", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_CreatePartnerSSEGateway_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient", + "shortName": "SSEGatewayServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient.create_partner_sse_gateway", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.CreatePartnerSSEGateway", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "CreatePartnerSSEGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreatePartnerSSEGatewayRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "partner_sse_gateway", + "type": "google.cloud.network_security_v1alpha1.types.PartnerSSEGateway" + }, + { + "name": "partner_sse_gateway_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_partner_sse_gateway" + }, + "description": "Sample for CreatePartnerSSEGateway", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_CreatePartnerSSEGateway_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_create_partner_sse_gateway_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient", + "shortName": "SSEGatewayServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient.delete_partner_sse_gateway", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.DeletePartnerSSEGateway", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "DeletePartnerSSEGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeletePartnerSSEGatewayRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": 
"delete_partner_sse_gateway" + }, + "description": "Sample for DeletePartnerSSEGateway", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_DeletePartnerSSEGateway_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient", + "shortName": "SSEGatewayServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient.delete_partner_sse_gateway", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.DeletePartnerSSEGateway", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "DeletePartnerSSEGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeletePartnerSSEGatewayRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_partner_sse_gateway" + }, + "description": "Sample for DeletePartnerSSEGateway", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_DeletePartnerSSEGateway_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_delete_partner_sse_gateway_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient", + "shortName": "SSEGatewayServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient.get_partner_sse_gateway", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.GetPartnerSSEGateway", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "GetPartnerSSEGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetPartnerSSEGatewayRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.PartnerSSEGateway", + "shortName": "get_partner_sse_gateway" + }, + "description": "Sample for GetPartnerSSEGateway", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_GetPartnerSSEGateway_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient", + "shortName": "SSEGatewayServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient.get_partner_sse_gateway", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.GetPartnerSSEGateway", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "GetPartnerSSEGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetPartnerSSEGatewayRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.PartnerSSEGateway", + "shortName": "get_partner_sse_gateway" + }, + "description": "Sample for GetPartnerSSEGateway", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_GetPartnerSSEGateway_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_get_partner_sse_gateway_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient", + "shortName": "SSEGatewayServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient.get_sse_gateway_reference", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.GetSSEGatewayReference", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": 
"SSEGatewayService" + }, + "shortName": "GetSSEGatewayReference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetSSEGatewayReferenceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.SSEGatewayReference", + "shortName": "get_sse_gateway_reference" + }, + "description": "Sample for GetSSEGatewayReference", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_GetSSEGatewayReference_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient", + "shortName": "SSEGatewayServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient.get_sse_gateway_reference", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.GetSSEGatewayReference", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "GetSSEGatewayReference" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetSSEGatewayReferenceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.SSEGatewayReference", + "shortName": "get_sse_gateway_reference" + }, + "description": "Sample for GetSSEGatewayReference", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_GetSSEGatewayReference_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_get_sse_gateway_reference_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient", + "shortName": "SSEGatewayServiceAsyncClient" + }, + "fullName": 
"google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient.list_partner_sse_gateways", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.ListPartnerSSEGateways", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "ListPartnerSSEGateways" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.sse_gateway_service.pagers.ListPartnerSSEGatewaysAsyncPager", + "shortName": "list_partner_sse_gateways" + }, + "description": "Sample for ListPartnerSSEGateways", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_ListPartnerSSEGateways_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient", + "shortName": "SSEGatewayServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient.list_partner_sse_gateways", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.ListPartnerSSEGateways", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "ListPartnerSSEGateways" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListPartnerSSEGatewaysRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.sse_gateway_service.pagers.ListPartnerSSEGatewaysPager", + "shortName": "list_partner_sse_gateways" + }, + "description": "Sample for ListPartnerSSEGateways", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_ListPartnerSSEGateways_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + 
"start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_list_partner_sse_gateways_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient", + "shortName": "SSEGatewayServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient.list_sse_gateway_references", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.ListSSEGatewayReferences", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "ListSSEGatewayReferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.sse_gateway_service.pagers.ListSSEGatewayReferencesAsyncPager", + "shortName": "list_sse_gateway_references" + }, + "description": "Sample for ListSSEGatewayReferences", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_ListSSEGatewayReferences_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient", + "shortName": "SSEGatewayServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient.list_sse_gateway_references", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.ListSSEGatewayReferences", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "ListSSEGatewayReferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListSSEGatewayReferencesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.sse_gateway_service.pagers.ListSSEGatewayReferencesPager", + "shortName": "list_sse_gateway_references" + }, + "description": "Sample for ListSSEGatewayReferences", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"networksecurity_v1alpha1_generated_SSEGatewayService_ListSSEGatewayReferences_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_list_sse_gateway_references_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient", + "shortName": "SSEGatewayServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceAsyncClient.update_partner_sse_gateway", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.UpdatePartnerSSEGateway", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "UpdatePartnerSSEGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdatePartnerSSEGatewayRequest" + }, + { + "name": "partner_sse_gateway", + "type": "google.cloud.network_security_v1alpha1.types.PartnerSSEGateway" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_partner_sse_gateway" + }, + "description": "Sample for UpdatePartnerSSEGateway", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_UpdatePartnerSSEGateway_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient", + "shortName": "SSEGatewayServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSEGatewayServiceClient.update_partner_sse_gateway", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService.UpdatePartnerSSEGateway", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSEGatewayService", + "shortName": "SSEGatewayService" + }, + "shortName": "UpdatePartnerSSEGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.UpdatePartnerSSEGatewayRequest" + }, + { + "name": "partner_sse_gateway", + "type": "google.cloud.network_security_v1alpha1.types.PartnerSSEGateway" + }, + { + "name": 
"update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_partner_sse_gateway" + }, + "description": "Sample for UpdatePartnerSSEGateway", + "file": "networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSEGatewayService_UpdatePartnerSSEGateway_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_gateway_service_update_partner_sse_gateway_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.create_partner_sse_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.CreatePartnerSSERealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "CreatePartnerSSERealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreatePartnerSSERealmRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "partner_sse_realm", + "type": "google.cloud.network_security_v1alpha1.types.PartnerSSERealm" + }, + { + "name": "partner_sse_realm_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_partner_sse_realm" + }, + "description": "Sample for CreatePartnerSSERealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_CreatePartnerSSERealm_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": 
"google.cloud.network_security_v1alpha1.SSERealmServiceClient.create_partner_sse_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.CreatePartnerSSERealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "CreatePartnerSSERealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreatePartnerSSERealmRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "partner_sse_realm", + "type": "google.cloud.network_security_v1alpha1.types.PartnerSSERealm" + }, + { + "name": "partner_sse_realm_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_partner_sse_realm" + }, + "description": "Sample for CreatePartnerSSERealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_CreatePartnerSSERealm_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_create_partner_sse_realm_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.create_sac_attachment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.CreateSACAttachment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "CreateSACAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateSACAttachmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sac_attachment", + "type": "google.cloud.network_security_v1alpha1.types.SACAttachment" + }, + { + "name": "sac_attachment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_sac_attachment" + }, + "description": "Sample for CreateSACAttachment", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_CreateSACAttachment_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.create_sac_attachment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.CreateSACAttachment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "CreateSACAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateSACAttachmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sac_attachment", + "type": "google.cloud.network_security_v1alpha1.types.SACAttachment" + }, + { + "name": "sac_attachment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_sac_attachment" + }, + "description": "Sample for CreateSACAttachment", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_CreateSACAttachment_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_create_sac_attachment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.create_sac_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.CreateSACRealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "CreateSACRealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateSACRealmRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sac_realm", + "type": "google.cloud.network_security_v1alpha1.types.SACRealm" + }, + { + "name": "sac_realm_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_sac_realm" + }, + "description": 
"Sample for CreateSACRealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_CreateSACRealm_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.create_sac_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.CreateSACRealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "CreateSACRealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.CreateSACRealmRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sac_realm", + "type": "google.cloud.network_security_v1alpha1.types.SACRealm" + }, + { + "name": "sac_realm_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_sac_realm" + }, + "description": "Sample for CreateSACRealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_CreateSACRealm_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_create_sac_realm_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.delete_partner_sse_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.DeletePartnerSSERealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "DeletePartnerSSERealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeletePartnerSSERealmRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_partner_sse_realm" + }, + "description": "Sample for DeletePartnerSSERealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_DeletePartnerSSERealm_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.delete_partner_sse_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.DeletePartnerSSERealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "DeletePartnerSSERealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeletePartnerSSERealmRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_partner_sse_realm" + }, + "description": "Sample for DeletePartnerSSERealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_DeletePartnerSSERealm_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_delete_partner_sse_realm_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.delete_sac_attachment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.DeleteSACAttachment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "DeleteSACAttachment" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.network_security_v1alpha1.types.DeleteSACAttachmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_sac_attachment" + }, + "description": "Sample for DeleteSACAttachment", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_DeleteSACAttachment_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.delete_sac_attachment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.DeleteSACAttachment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "DeleteSACAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteSACAttachmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_sac_attachment" + }, + "description": "Sample for DeleteSACAttachment", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_DeleteSACAttachment_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_attachment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.delete_sac_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.DeleteSACRealm", + "service": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "DeleteSACRealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteSACRealmRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_sac_realm" + }, + "description": "Sample for DeleteSACRealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_DeleteSACRealm_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.delete_sac_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.DeleteSACRealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "DeleteSACRealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.DeleteSACRealmRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_sac_realm" + }, + "description": "Sample for DeleteSACRealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_DeleteSACRealm_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_delete_sac_realm_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.get_partner_sse_realm", + "method": { + "fullName": 
"google.cloud.networksecurity.v1alpha1.SSERealmService.GetPartnerSSERealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "GetPartnerSSERealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetPartnerSSERealmRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.PartnerSSERealm", + "shortName": "get_partner_sse_realm" + }, + "description": "Sample for GetPartnerSSERealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_GetPartnerSSERealm_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.get_partner_sse_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.GetPartnerSSERealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "GetPartnerSSERealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetPartnerSSERealmRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.PartnerSSERealm", + "shortName": "get_partner_sse_realm" + }, + "description": "Sample for GetPartnerSSERealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_GetPartnerSSERealm_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_get_partner_sse_realm_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", 
+ "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.get_sac_attachment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.GetSACAttachment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "GetSACAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetSACAttachmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.SACAttachment", + "shortName": "get_sac_attachment" + }, + "description": "Sample for GetSACAttachment", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_GetSACAttachment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.get_sac_attachment", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.GetSACAttachment", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "GetSACAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetSACAttachmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.SACAttachment", + "shortName": "get_sac_attachment" + }, + "description": "Sample for GetSACAttachment", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_GetSACAttachment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_get_sac_attachment_sync.py" + }, + { + "canonical": true, 
+ "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.get_sac_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.GetSACRealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "GetSACRealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetSACRealmRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.SACRealm", + "shortName": "get_sac_realm" + }, + "description": "Sample for GetSACRealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_GetSACRealm_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.get_sac_realm", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.GetSACRealm", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "GetSACRealm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.GetSACRealmRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.types.SACRealm", + "shortName": "get_sac_realm" + }, + "description": "Sample for GetSACRealm", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_GetSACRealm_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"networksecurity_v1alpha1_generated_sse_realm_service_get_sac_realm_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.list_partner_sse_realms", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.ListPartnerSSERealms", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "ListPartnerSSERealms" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListPartnerSSERealmsAsyncPager", + "shortName": "list_partner_sse_realms" + }, + "description": "Sample for ListPartnerSSERealms", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_ListPartnerSSERealms_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.list_partner_sse_realms", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.ListPartnerSSERealms", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "ListPartnerSSERealms" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListPartnerSSERealmsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListPartnerSSERealmsPager", + "shortName": "list_partner_sse_realms" + }, + "description": "Sample for ListPartnerSSERealms", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_ListPartnerSSERealms_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + 
}, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_list_partner_sse_realms_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.list_sac_attachments", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.ListSACAttachments", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "ListSACAttachments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListSACAttachmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListSACAttachmentsAsyncPager", + "shortName": "list_sac_attachments" + }, + "description": "Sample for ListSACAttachments", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_ListSACAttachments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.list_sac_attachments", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.ListSACAttachments", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "ListSACAttachments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListSACAttachmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListSACAttachmentsPager", + "shortName": "list_sac_attachments" + }, + "description": "Sample for ListSACAttachments", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_ListSACAttachments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_list_sac_attachments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient", + "shortName": "SSERealmServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceAsyncClient.list_sac_realms", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.ListSACRealms", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "ListSACRealms" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListSACRealmsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListSACRealmsAsyncPager", + "shortName": "list_sac_realms" + }, + "description": "Sample for ListSACRealms", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_ListSACRealms_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient", + "shortName": "SSERealmServiceClient" + }, + "fullName": "google.cloud.network_security_v1alpha1.SSERealmServiceClient.list_sac_realms", + "method": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService.ListSACRealms", + "service": { + "fullName": "google.cloud.networksecurity.v1alpha1.SSERealmService", + "shortName": "SSERealmService" + }, + "shortName": "ListSACRealms" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1alpha1.types.ListSACRealmsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1alpha1.services.sse_realm_service.pagers.ListSACRealmsPager", + 
"shortName": "list_sac_realms" + }, + "description": "Sample for ListSACRealms", + "file": "networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1alpha1_generated_SSERealmService_ListSACRealms_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1alpha1_generated_sse_realm_service_list_sac_realms_sync.py" } ] } diff --git a/packages/google-cloud-network-security/scripts/fixup_network_security_v1alpha1_keywords.py b/packages/google-cloud-network-security/scripts/fixup_network_security_v1alpha1_keywords.py index f2a9d62fb02b..d7d1e2fd45f8 100644 --- a/packages/google-cloud-network-security/scripts/fixup_network_security_v1alpha1_keywords.py +++ b/packages/google-cloud-network-security/scripts/fixup_network_security_v1alpha1_keywords.py @@ -39,11 +39,135 @@ def partition( class network_securityCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_authorization_policy': ('parent', 'authorization_policy_id', 'authorization_policy', ), + 'create_authz_policy': ('parent', 'authz_policy_id', 'authz_policy', 'request_id', ), + 'create_backend_authentication_config': ('parent', 'backend_authentication_config_id', 'backend_authentication_config', ), 'create_client_tls_policy': ('parent', 'client_tls_policy_id', 'client_tls_policy', ), + 'create_dns_threat_detector': ('parent', 'dns_threat_detector', 'dns_threat_detector_id', ), + 'create_firewall_endpoint': ('parent', 'firewall_endpoint_id', 'firewall_endpoint', 'request_id', ), + 'create_firewall_endpoint_association': ('parent', 'firewall_endpoint_association', 'firewall_endpoint_association_id', 'request_id', ), + 'create_gateway_security_policy': ('parent', 'gateway_security_policy_id', 'gateway_security_policy', ), + 'create_gateway_security_policy_rule': ('parent', 'gateway_security_policy_rule', 'gateway_security_policy_rule_id', ), + 'create_intercept_deployment': ('parent', 'intercept_deployment_id', 'intercept_deployment', 'request_id', ), + 'create_intercept_deployment_group': ('parent', 'intercept_deployment_group_id', 'intercept_deployment_group', 'request_id', ), + 'create_intercept_endpoint_group': ('parent', 'intercept_endpoint_group_id', 'intercept_endpoint_group', 'request_id', ), + 'create_intercept_endpoint_group_association': ('parent', 'intercept_endpoint_group_association', 'intercept_endpoint_group_association_id', 'request_id', ), + 'create_mirroring_deployment': ('parent', 'mirroring_deployment_id', 'mirroring_deployment', 'request_id', ), + 'create_mirroring_deployment_group': ('parent', 'mirroring_deployment_group_id', 'mirroring_deployment_group', 'request_id', ), + 'create_mirroring_endpoint_group': ('parent', 'mirroring_endpoint_group_id', 'mirroring_endpoint_group', 'request_id', ), + 'create_mirroring_endpoint_group_association': ('parent', 'mirroring_endpoint_group_association', 'mirroring_endpoint_group_association_id', 'request_id', ), + 'create_partner_sse_gateway': ('parent', 'partner_sse_gateway_id', 'partner_sse_gateway', 
'request_id', ), + 'create_partner_sse_realm': ('parent', 'partner_sse_realm_id', 'partner_sse_realm', 'request_id', ), + 'create_sac_attachment': ('parent', 'sac_attachment_id', 'sac_attachment', 'request_id', ), + 'create_sac_realm': ('parent', 'sac_realm_id', 'sac_realm', 'request_id', ), + 'create_security_profile': ('parent', 'security_profile_id', 'security_profile', ), + 'create_security_profile_group': ('parent', 'security_profile_group_id', 'security_profile_group', ), + 'create_server_tls_policy': ('parent', 'server_tls_policy_id', 'server_tls_policy', ), + 'create_tls_inspection_policy': ('parent', 'tls_inspection_policy_id', 'tls_inspection_policy', ), + 'create_url_list': ('parent', 'url_list_id', 'url_list', ), + 'delete_authorization_policy': ('name', ), + 'delete_authz_policy': ('name', 'request_id', ), + 'delete_backend_authentication_config': ('name', 'etag', ), 'delete_client_tls_policy': ('name', ), + 'delete_dns_threat_detector': ('name', ), + 'delete_firewall_endpoint': ('name', 'request_id', ), + 'delete_firewall_endpoint_association': ('name', 'request_id', ), + 'delete_gateway_security_policy': ('name', ), + 'delete_gateway_security_policy_rule': ('name', ), + 'delete_intercept_deployment': ('name', 'request_id', ), + 'delete_intercept_deployment_group': ('name', 'request_id', ), + 'delete_intercept_endpoint_group': ('name', 'request_id', ), + 'delete_intercept_endpoint_group_association': ('name', 'request_id', ), + 'delete_mirroring_deployment': ('name', 'request_id', ), + 'delete_mirroring_deployment_group': ('name', 'request_id', ), + 'delete_mirroring_endpoint_group': ('name', 'request_id', ), + 'delete_mirroring_endpoint_group_association': ('name', 'request_id', ), + 'delete_partner_sse_gateway': ('name', 'request_id', ), + 'delete_partner_sse_realm': ('name', 'request_id', ), + 'delete_sac_attachment': ('name', 'request_id', ), + 'delete_sac_realm': ('name', 'request_id', ), + 'delete_security_profile': ('name', 'etag', ), + 'delete_security_profile_group': ('name', 'etag', ), + 'delete_server_tls_policy': ('name', ), + 'delete_tls_inspection_policy': ('name', 'force', ), + 'delete_url_list': ('name', ), + 'get_authorization_policy': ('name', ), + 'get_authz_policy': ('name', ), + 'get_backend_authentication_config': ('name', ), 'get_client_tls_policy': ('name', ), + 'get_dns_threat_detector': ('name', ), + 'get_firewall_endpoint': ('name', ), + 'get_firewall_endpoint_association': ('name', ), + 'get_gateway_security_policy': ('name', ), + 'get_gateway_security_policy_rule': ('name', ), + 'get_intercept_deployment': ('name', ), + 'get_intercept_deployment_group': ('name', ), + 'get_intercept_endpoint_group': ('name', ), + 'get_intercept_endpoint_group_association': ('name', ), + 'get_mirroring_deployment': ('name', ), + 'get_mirroring_deployment_group': ('name', ), + 'get_mirroring_endpoint_group': ('name', ), + 'get_mirroring_endpoint_group_association': ('name', ), + 'get_partner_sse_gateway': ('name', ), + 'get_partner_sse_realm': ('name', ), + 'get_sac_attachment': ('name', ), + 'get_sac_realm': ('name', ), + 'get_security_profile': ('name', ), + 'get_security_profile_group': ('name', ), + 'get_server_tls_policy': ('name', ), + 'get_sse_gateway_reference': ('name', ), + 'get_tls_inspection_policy': ('name', ), + 'get_url_list': ('name', ), + 'list_authorization_policies': ('parent', 'page_size', 'page_token', ), + 'list_authz_policies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_backend_authentication_configs': ('parent', 
'page_size', 'page_token', ), 'list_client_tls_policies': ('parent', 'page_size', 'page_token', ), + 'list_dns_threat_detectors': ('parent', 'page_size', 'page_token', ), + 'list_firewall_endpoint_associations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_firewall_endpoints': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_gateway_security_policies': ('parent', 'page_size', 'page_token', ), + 'list_gateway_security_policy_rules': ('parent', 'page_size', 'page_token', ), + 'list_intercept_deployment_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_intercept_deployments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_intercept_endpoint_group_associations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_intercept_endpoint_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_mirroring_deployment_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_mirroring_deployments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_mirroring_endpoint_group_associations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_mirroring_endpoint_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_partner_sse_gateways': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_partner_sse_realms': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_sac_attachments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_sac_realms': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_security_profile_groups': ('parent', 'page_size', 'page_token', ), + 'list_security_profiles': ('parent', 'page_size', 'page_token', ), + 'list_server_tls_policies': ('parent', 'page_size', 'page_token', 'return_partial_success', ), + 'list_sse_gateway_references': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_tls_inspection_policies': ('parent', 'page_size', 'page_token', ), + 'list_url_lists': ('parent', 'page_size', 'page_token', ), + 'update_authorization_policy': ('authorization_policy', 'update_mask', ), + 'update_authz_policy': ('update_mask', 'authz_policy', 'request_id', ), + 'update_backend_authentication_config': ('backend_authentication_config', 'update_mask', ), 'update_client_tls_policy': ('client_tls_policy', 'update_mask', ), + 'update_dns_threat_detector': ('dns_threat_detector', 'update_mask', ), + 'update_firewall_endpoint': ('update_mask', 'firewall_endpoint', 'request_id', ), + 'update_firewall_endpoint_association': ('update_mask', 'firewall_endpoint_association', 'request_id', ), + 'update_gateway_security_policy': ('gateway_security_policy', 'update_mask', ), + 'update_gateway_security_policy_rule': ('gateway_security_policy_rule', 'update_mask', ), + 'update_intercept_deployment': ('intercept_deployment', 'update_mask', 'request_id', ), + 'update_intercept_deployment_group': ('intercept_deployment_group', 'update_mask', 'request_id', ), + 'update_intercept_endpoint_group': ('intercept_endpoint_group', 'update_mask', 'request_id', ), + 'update_intercept_endpoint_group_association': ('intercept_endpoint_group_association', 'update_mask', 'request_id', ), + 'update_mirroring_deployment': ('mirroring_deployment', 'update_mask', 'request_id', ), + 'update_mirroring_deployment_group': ('mirroring_deployment_group', 'update_mask', 'request_id', ), + 
'update_mirroring_endpoint_group': ('mirroring_endpoint_group', 'update_mask', 'request_id', ), + 'update_mirroring_endpoint_group_association': ('mirroring_endpoint_group_association', 'update_mask', 'request_id', ), + 'update_partner_sse_gateway': ('partner_sse_gateway', 'update_mask', 'request_id', ), + 'update_security_profile': ('update_mask', 'security_profile', ), + 'update_security_profile_group': ('update_mask', 'security_profile_group', ), + 'update_server_tls_policy': ('server_tls_policy', 'update_mask', ), + 'update_tls_inspection_policy': ('tls_inspection_policy', 'update_mask', ), + 'update_url_list': ('url_list', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_dns_threat_detector_service.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_dns_threat_detector_service.py new file mode 100644 index 000000000000..92fdea19966f --- /dev/null +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_dns_threat_detector_service.py @@ -0,0 +1,8213 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.dns_threat_detector_service import ( + DnsThreatDetectorServiceAsyncClient, + DnsThreatDetectorServiceClient, + pagers, + transports, +) +from google.cloud.network_security_v1alpha1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1alpha1.types import dns_threat_detector + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DnsThreatDetectorServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DnsThreatDetectorServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DnsThreatDetectorServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DnsThreatDetectorServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DnsThreatDetectorServiceClient._get_client_cert_source(None, False) is None + assert ( + DnsThreatDetectorServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + DnsThreatDetectorServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + 
== mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DnsThreatDetectorServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DnsThreatDetectorServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DnsThreatDetectorServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceClient), +) +@mock.patch.object( + DnsThreatDetectorServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + default_endpoint = DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DnsThreatDetectorServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == DnsThreatDetectorServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DnsThreatDetectorServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DnsThreatDetectorServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DnsThreatDetectorServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DnsThreatDetectorServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DnsThreatDetectorServiceClient._get_universe_domain(None, None) + == DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DnsThreatDetectorServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
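+
+# A caller-side sketch of the endpoint resolution exercised by the helpers
+# above (illustrative only; "my-universe.example.com" is a placeholder and the
+# anonymous credentials merely avoid an ADC lookup):
+#
+#   options = client_options.ClientOptions(universe_domain="my-universe.example.com")
+#   client = DnsThreatDetectorServiceClient(
+#       client_options=options,
+#       credentials=ga_credentials.AnonymousCredentials(),
+#   )
+#   # With GOOGLE_API_USE_MTLS_ENDPOINT unset ("auto") and no client cert, the
+#   # endpoint comes from _DEFAULT_ENDPOINT_TEMPLATE formatted with the
+#   # configured universe domain; ClientOptions.api_endpoint, when given,
+#   # overrides it unconditionally.
+#   assert client.universe_domain == "my-universe.example.com"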
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DnsThreatDetectorServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DnsThreatDetectorServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DnsThreatDetectorServiceClient, "grpc"), + (DnsThreatDetectorServiceAsyncClient, "grpc_asyncio"), + (DnsThreatDetectorServiceClient, "rest"), + ], +) +def test_dns_threat_detector_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DnsThreatDetectorServiceGrpcTransport, "grpc"), + (transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DnsThreatDetectorServiceRestTransport, "rest"), + ], +) +def test_dns_threat_detector_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DnsThreatDetectorServiceClient, "grpc"), + (DnsThreatDetectorServiceAsyncClient, "grpc_asyncio"), + (DnsThreatDetectorServiceClient, "rest"), + ], +) +def test_dns_threat_detector_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() 
+ with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +def test_dns_threat_detector_service_client_get_transport_class(): + transport = DnsThreatDetectorServiceClient.get_transport_class() + available_transports = [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceRestTransport, + ] + assert transport in available_transports + + transport = DnsThreatDetectorServiceClient.get_transport_class("grpc") + assert transport == transports.DnsThreatDetectorServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + DnsThreatDetectorServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceClient), +) +@mock.patch.object( + DnsThreatDetectorServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceAsyncClient), +) +def test_dns_threat_detector_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + DnsThreatDetectorServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + DnsThreatDetectorServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DnsThreatDetectorServiceClient, + 
transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + "true", + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + "false", + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceRestTransport, + "rest", + "true", + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + DnsThreatDetectorServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceClient), +) +@mock.patch.object( + DnsThreatDetectorServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dns_threat_detector_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
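+    # (Caller-side context, a sketch assuming standard google-auth behavior:
+    # this scenario corresponds to exporting GOOGLE_API_USE_CLIENT_CERTIFICATE=true
+    # while a default client certificate is available to ADC, e.g. one set up
+    # through context-aware access tooling; no client_cert_source needs to be
+    # passed in ClientOptions for the mTLS endpoint to be selected.)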
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [DnsThreatDetectorServiceClient, DnsThreatDetectorServiceAsyncClient], +) +@mock.patch.object( + DnsThreatDetectorServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DnsThreatDetectorServiceClient), +) +@mock.patch.object( + DnsThreatDetectorServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DnsThreatDetectorServiceAsyncClient), +) +def test_dns_threat_detector_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [DnsThreatDetectorServiceClient, DnsThreatDetectorServiceAsyncClient], +) +@mock.patch.object( + DnsThreatDetectorServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceClient), +) +@mock.patch.object( + DnsThreatDetectorServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceAsyncClient), +) +def test_dns_threat_detector_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + default_endpoint = DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
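+    # A sketch of that override from the caller's side ("private.example.com"
+    # is a placeholder endpoint):
+    #
+    #   options = client_options.ClientOptions(api_endpoint="private.example.com")
+    #   client = DnsThreatDetectorServiceClient(
+    #       client_options=options,
+    #       credentials=ga_credentials.AnonymousCredentials(),
+    #   )
+    #   assert client.api_endpoint == "private.example.com"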
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceRestTransport, + "rest", + ), + ], +) +def test_dns_threat_detector_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
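+    # (Sketch: a real caller narrows OAuth scopes the same way, e.g.
+    # ClientOptions(scopes=["https://www.googleapis.com/auth/cloud-platform"]);
+    # when scopes are omitted the transport falls back to its default scopes.)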
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceRestTransport, + "rest", + None, + ), + ], +) +def test_dns_threat_detector_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_dns_threat_detector_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.network_security_v1alpha1.services.dns_threat_detector_service.transports.DnsThreatDetectorServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DnsThreatDetectorServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_dns_threat_detector_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
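+    # (Sketch, assuming standard google-auth behavior: a caller pointing
+    # ClientOptions(credentials_file=...) at a service account key makes the
+    # transport load credentials via google.auth.load_credentials_from_file
+    # instead of Application Default Credentials, as the second half of this
+    # test asserts.)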
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.ListDnsThreatDetectorsRequest, + dict, + ], +) +def test_list_dns_threat_detectors(request_type, transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.ListDnsThreatDetectorsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDnsThreatDetectorsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_dns_threat_detectors_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
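+    # (Context, assuming the usual generator behavior per AIP-4235: request
+    # fields annotated with (google.api.field_info).format = UUID4, typically
+    # request_id, are filled with a fresh uuid4 string when the caller leaves
+    # them unset, which keeps retried requests idempotent.)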
+ client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dns_threat_detector.ListDnsThreatDetectorsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_dns_threat_detectors(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dns_threat_detector.ListDnsThreatDetectorsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_dns_threat_detectors_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_dns_threat_detectors + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_dns_threat_detectors + ] = mock_rpc + request = {} + client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_dns_threat_detectors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_dns_threat_detectors + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_dns_threat_detectors + ] = mock_rpc + + request = {} + await client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_dns_threat_detectors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_async( + transport: str = "grpc_asyncio", + request_type=dns_threat_detector.ListDnsThreatDetectorsRequest, +): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.ListDnsThreatDetectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.ListDnsThreatDetectorsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDnsThreatDetectorsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_async_from_dict(): + await test_list_dns_threat_detectors_async(request_type=dict) + + +def test_list_dns_threat_detectors_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.ListDnsThreatDetectorsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + call.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.ListDnsThreatDetectorsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.ListDnsThreatDetectorsResponse() + ) + await client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_dns_threat_detectors_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_dns_threat_detectors( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_dns_threat_detectors_flattened_error(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dns_threat_detectors( + dns_threat_detector.ListDnsThreatDetectorsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_flattened_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.ListDnsThreatDetectorsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_dns_threat_detectors( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_flattened_error_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_dns_threat_detectors( + dns_threat_detector.ListDnsThreatDetectorsRequest(), + parent="parent_value", + ) + + +def test_list_dns_threat_detectors_pager(transport_name: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="abc", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[], + next_page_token="def", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="ghi", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_dns_threat_detectors( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, dns_threat_detector.DnsThreatDetector) for i in results + ) + + +def test_list_dns_threat_detectors_pages(transport_name: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="abc", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[], + next_page_token="def", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="ghi", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + ), + RuntimeError, + ) + pages = list(client.list_dns_threat_detectors(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_async_pager(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
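+    # (Caller-side sketch, assuming the standard pager surface: the sync client
+    # returns a ListDnsThreatDetectorsPager that can be iterated element by
+    # element, e.g. "for detector in client.list_dns_threat_detectors(...)",
+    # fetching further pages lazily, while the async client returns an
+    # AsyncPager that is consumed with "async for", as exercised below.)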
+ with mock.patch.object( + type(client.transport.list_dns_threat_detectors), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="abc", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[], + next_page_token="def", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="ghi", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_dns_threat_detectors( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, dns_threat_detector.DnsThreatDetector) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_async_pages(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="abc", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[], + next_page_token="def", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="ghi", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_dns_threat_detectors(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.GetDnsThreatDetectorRequest, + dict, + ], +) +def test_get_dns_threat_detector(request_type, transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + response = client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.GetDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert response.provider == dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + + +def test_get_dns_threat_detector_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dns_threat_detector.GetDnsThreatDetectorRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_dns_threat_detector(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dns_threat_detector.GetDnsThreatDetectorRequest( + name="name_value", + ) + + +def test_get_dns_threat_detector_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_dns_threat_detector + ] = mock_rpc + request = {} + client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_dns_threat_detector + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_dns_threat_detector + ] = mock_rpc + + request = {} + await client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_async( + transport: str = "grpc_asyncio", + request_type=dns_threat_detector.GetDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + response = await client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.GetDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert response.provider == dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_async_from_dict(): + await test_get_dns_threat_detector_async(request_type=dict) + + +def test_get_dns_threat_detector_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
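+    # (Background, assuming standard GAPIC routing: such fields are mirrored
+    # into the x-goog-request-params metadata entry as URL-encoded key/value
+    # pairs, here "name=name_value", so the frontend can route the request
+    # without inspecting the body.)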
+ request = dns_threat_detector.GetDnsThreatDetectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + call.return_value = dns_threat_detector.DnsThreatDetector() + client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.GetDnsThreatDetectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.DnsThreatDetector() + ) + await client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_dns_threat_detector_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dns_threat_detector.DnsThreatDetector() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dns_threat_detector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_dns_threat_detector_flattened_error(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dns_threat_detector( + dns_threat_detector.GetDnsThreatDetectorRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_flattened_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dns_threat_detector.DnsThreatDetector() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.DnsThreatDetector() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_dns_threat_detector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_flattened_error_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_dns_threat_detector( + dns_threat_detector.GetDnsThreatDetectorRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, + dict, + ], +) +def test_create_dns_threat_detector(request_type, transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + response = client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +def test_create_dns_threat_detector_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_dns_threat_detector(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_dns_threat_detector.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + +def test_create_dns_threat_detector_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_dns_threat_detector + ] = mock_rpc + request = {} + client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_dns_threat_detector + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_dns_threat_detector + ] = mock_rpc + + request = {} + await client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_async( + transport: str = "grpc_asyncio", + request_type=gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + response = await client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_async_from_dict(): + await test_create_dns_threat_detector_async(request_type=dict) + + +def test_create_dns_threat_detector_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector() + ) + await client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_dns_threat_detector_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_dns_threat_detector( + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].dns_threat_detector + mock_val = gcn_dns_threat_detector.DnsThreatDetector(name="name_value") + assert arg == mock_val + arg = args[0].dns_threat_detector_id + mock_val = "dns_threat_detector_id_value" + assert arg == mock_val + + +def test_create_dns_threat_detector_flattened_error(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_dns_threat_detector( + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest(), + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_flattened_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_dns_threat_detector( + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].dns_threat_detector + mock_val = gcn_dns_threat_detector.DnsThreatDetector(name="name_value") + assert arg == mock_val + arg = args[0].dns_threat_detector_id + mock_val = "dns_threat_detector_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_flattened_error_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_dns_threat_detector( + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest(), + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, + dict, + ], +) +def test_update_dns_threat_detector(request_type, transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + response = client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +def test_update_dns_threat_detector_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_dns_threat_detector(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + +def test_update_dns_threat_detector_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_dns_threat_detector + ] = mock_rpc + request = {} + client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_dns_threat_detector + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_dns_threat_detector + ] = mock_rpc + + request = {} + await client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_async( + transport: str = "grpc_asyncio", + request_type=gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + response = await client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_async_from_dict(): + await test_update_dns_threat_detector_async(request_type=dict) + + +def test_update_dns_threat_detector_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + request.dns_threat_detector.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dns_threat_detector.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + request.dns_threat_detector.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector() + ) + await client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dns_threat_detector.name=name_value", + ) in kw["metadata"] + + +def test_update_dns_threat_detector_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_dns_threat_detector( + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].dns_threat_detector + mock_val = gcn_dns_threat_detector.DnsThreatDetector(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_dns_threat_detector_flattened_error(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_dns_threat_detector( + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest(), + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_flattened_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_dns_threat_detector( + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].dns_threat_detector + mock_val = gcn_dns_threat_detector.DnsThreatDetector(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_flattened_error_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_dns_threat_detector( + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest(), + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.DeleteDnsThreatDetectorRequest, + dict, + ], +) +def test_delete_dns_threat_detector(request_type, transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.DeleteDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dns_threat_detector_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dns_threat_detector.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_dns_threat_detector(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dns_threat_detector.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + +def test_delete_dns_threat_detector_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_dns_threat_detector + ] = mock_rpc + request = {} + client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_dns_threat_detector + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_dns_threat_detector + ] = mock_rpc + + request = {} + await client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_async( + transport: str = "grpc_asyncio", + request_type=dns_threat_detector.DeleteDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.DeleteDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_async_from_dict(): + await test_delete_dns_threat_detector_async(request_type=dict) + + +def test_delete_dns_threat_detector_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.DeleteDnsThreatDetectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + call.return_value = None + client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.DeleteDnsThreatDetectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_dns_threat_detector_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_dns_threat_detector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_dns_threat_detector_flattened_error(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dns_threat_detector( + dns_threat_detector.DeleteDnsThreatDetectorRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_flattened_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dns_threat_detector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_flattened_error_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_dns_threat_detector( + dns_threat_detector.DeleteDnsThreatDetectorRequest(), + name="name_value", + ) + + +def test_list_dns_threat_detectors_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_dns_threat_detectors + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_dns_threat_detectors + ] = mock_rpc + + request = {} + client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_dns_threat_detectors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_dns_threat_detectors_rest_required_fields( + request_type=dns_threat_detector.ListDnsThreatDetectorsRequest, +): + transport_class = transports.DnsThreatDetectorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dns_threat_detectors._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dns_threat_detectors._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_dns_threat_detectors(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_dns_threat_detectors_rest_unset_required_fields(): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_dns_threat_detectors._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_dns_threat_detectors_rest_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_dns_threat_detectors(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/dnsThreatDetectors" + % client.transport._host, + args[1], + ) + + +def test_list_dns_threat_detectors_rest_flattened_error(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_dns_threat_detectors( + dns_threat_detector.ListDnsThreatDetectorsRequest(), + parent="parent_value", + ) + + +def test_list_dns_threat_detectors_rest_pager(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="abc", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[], + next_page_token="def", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="ghi", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + dns_threat_detector.ListDnsThreatDetectorsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_dns_threat_detectors(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, dns_threat_detector.DnsThreatDetector) for i in results + ) + + pages = list(client.list_dns_threat_detectors(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_dns_threat_detector_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_dns_threat_detector + ] = mock_rpc + + request = {} + client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_dns_threat_detector_rest_required_fields( + request_type=dns_threat_detector.GetDnsThreatDetectorRequest, +): + transport_class = transports.DnsThreatDetectorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.DnsThreatDetector() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_dns_threat_detector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_dns_threat_detector_rest_unset_required_fields(): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_dns_threat_detector._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_dns_threat_detector_rest_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.DnsThreatDetector() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_dns_threat_detector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/dnsThreatDetectors/*}" + % client.transport._host, + args[1], + ) + + +def test_get_dns_threat_detector_rest_flattened_error(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dns_threat_detector( + dns_threat_detector.GetDnsThreatDetectorRequest(), + name="name_value", + ) + + +def test_create_dns_threat_detector_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_dns_threat_detector + ] = mock_rpc + + request = {} + client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_dns_threat_detector_rest_required_fields( + request_type=gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, +): + transport_class = transports.DnsThreatDetectorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dns_threat_detector._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("dns_threat_detector_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_dns_threat_detector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_dns_threat_detector_rest_unset_required_fields(): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_dns_threat_detector._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("dnsThreatDetectorId",)) + & set( + ( + "parent", + "dnsThreatDetector", + ) + ) + ) + + +def test_create_dns_threat_detector_rest_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_dns_threat_detector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/dnsThreatDetectors" + % client.transport._host, + args[1], + ) + + +def test_create_dns_threat_detector_rest_flattened_error(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dns_threat_detector( + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest(), + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + +def test_update_dns_threat_detector_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_dns_threat_detector + ] = mock_rpc + + request = {} + client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_dns_threat_detector_rest_required_fields( + request_type=gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, +): + transport_class = transports.DnsThreatDetectorServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dns_threat_detector._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_dns_threat_detector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_dns_threat_detector_rest_unset_required_fields(): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_dns_threat_detector._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("dnsThreatDetector",))) + + +def test_update_dns_threat_detector_rest_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector() + + # get arguments that satisfy an http rule for this method + sample_request = { + "dns_threat_detector": { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_dns_threat_detector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{dns_threat_detector.name=projects/*/locations/*/dnsThreatDetectors/*}" + % client.transport._host, + args[1], + ) + + +def test_update_dns_threat_detector_rest_flattened_error(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_dns_threat_detector( + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest(), + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_dns_threat_detector_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_dns_threat_detector + ] = mock_rpc + + request = {} + client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_dns_threat_detector_rest_required_fields( + request_type=dns_threat_detector.DeleteDnsThreatDetectorRequest, +): + transport_class = transports.DnsThreatDetectorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_dns_threat_detector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_dns_threat_detector_rest_unset_required_fields(): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_dns_threat_detector._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_dns_threat_detector_rest_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_dns_threat_detector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/dnsThreatDetectors/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_dns_threat_detector_rest_flattened_error(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dns_threat_detector( + dns_threat_detector.DeleteDnsThreatDetectorRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DnsThreatDetectorServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DnsThreatDetectorServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DnsThreatDetectorServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DnsThreatDetectorServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DnsThreatDetectorServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + transports.DnsThreatDetectorServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = DnsThreatDetectorServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_dns_threat_detectors_empty_call_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + call.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + client.list_dns_threat_detectors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.ListDnsThreatDetectorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_dns_threat_detector_empty_call_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + call.return_value = dns_threat_detector.DnsThreatDetector() + client.get_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.GetDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_dns_threat_detector_empty_call_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + client.create_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_dns_threat_detector_empty_call_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + client.update_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_dns_threat_detector_empty_call_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + call.return_value = None + client.delete_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.DeleteDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DnsThreatDetectorServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_empty_call_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.ListDnsThreatDetectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_dns_threat_detectors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.ListDnsThreatDetectorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_dns_threat_detector_empty_call_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + await client.get_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.GetDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_dns_threat_detector_empty_call_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + await client.create_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_dns_threat_detector_empty_call_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + await client.update_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_empty_call_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.DeleteDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DnsThreatDetectorServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_dns_threat_detectors_rest_bad_request( + request_type=dns_threat_detector.ListDnsThreatDetectorsRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_dns_threat_detectors(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.ListDnsThreatDetectorsRequest, + dict, + ], +) +def test_list_dns_threat_detectors_rest_call_success(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_dns_threat_detectors(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDnsThreatDetectorsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dns_threat_detectors_rest_interceptors(null_interceptor): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DnsThreatDetectorServiceRestInterceptor(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_list_dns_threat_detectors", + ) as post, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_list_dns_threat_detectors_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "pre_list_dns_threat_detectors", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dns_threat_detector.ListDnsThreatDetectorsRequest.pb( + dns_threat_detector.ListDnsThreatDetectorsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse.to_json( + dns_threat_detector.ListDnsThreatDetectorsResponse() + ) + req.return_value.content = return_value + + request = dns_threat_detector.ListDnsThreatDetectorsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + post_with_metadata.return_value = ( + dns_threat_detector.ListDnsThreatDetectorsResponse(), + metadata, + ) + + client.list_dns_threat_detectors( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_dns_threat_detector_rest_bad_request( + request_type=dns_threat_detector.GetDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_dns_threat_detector(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.GetDnsThreatDetectorRequest, + dict, + ], +) +def test_get_dns_threat_detector_rest_call_success(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_dns_threat_detector(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert response.provider == dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dns_threat_detector_rest_interceptors(null_interceptor): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DnsThreatDetectorServiceRestInterceptor(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_get_dns_threat_detector", + ) as post, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_get_dns_threat_detector_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "pre_get_dns_threat_detector", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dns_threat_detector.GetDnsThreatDetectorRequest.pb( + dns_threat_detector.GetDnsThreatDetectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dns_threat_detector.DnsThreatDetector.to_json( + dns_threat_detector.DnsThreatDetector() + ) + req.return_value.content = return_value + + request = dns_threat_detector.GetDnsThreatDetectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dns_threat_detector.DnsThreatDetector() + post_with_metadata.return_value = ( + dns_threat_detector.DnsThreatDetector(), + metadata, + ) + + client.get_dns_threat_detector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_dns_threat_detector_rest_bad_request( + request_type=gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_dns_threat_detector(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, + dict, + ], +) +def test_create_dns_threat_detector_rest_call_success(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["dns_threat_detector"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "excluded_networks": ["excluded_networks_value1", "excluded_networks_value2"], + "provider": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest.meta.fields[ + "dns_threat_detector" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dns_threat_detector"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dns_threat_detector"][field])): + del request_init["dns_threat_detector"][field][i][subfield] + else: + del request_init["dns_threat_detector"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_dns_threat_detector(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dns_threat_detector_rest_interceptors(null_interceptor): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DnsThreatDetectorServiceRestInterceptor(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_create_dns_threat_detector", + ) as post, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_create_dns_threat_detector_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "pre_create_dns_threat_detector", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest.pb( + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gcn_dns_threat_detector.DnsThreatDetector.to_json( + gcn_dns_threat_detector.DnsThreatDetector() + ) + req.return_value.content = return_value + + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcn_dns_threat_detector.DnsThreatDetector() + post_with_metadata.return_value = ( + gcn_dns_threat_detector.DnsThreatDetector(), + metadata, + ) + + client.create_dns_threat_detector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_dns_threat_detector_rest_bad_request( + request_type=gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "dns_threat_detector": { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_dns_threat_detector(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, + dict, + ], +) +def test_update_dns_threat_detector_rest_call_success(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "dns_threat_detector": { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + } + request_init["dns_threat_detector"] = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "excluded_networks": ["excluded_networks_value1", "excluded_networks_value2"], + "provider": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest.meta.fields[ + "dns_threat_detector" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dns_threat_detector"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dns_threat_detector"][field])): + del request_init["dns_threat_detector"][field][i][subfield] + else: + del request_init["dns_threat_detector"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_dns_threat_detector(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_dns_threat_detector_rest_interceptors(null_interceptor): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DnsThreatDetectorServiceRestInterceptor(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_update_dns_threat_detector", + ) as post, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_update_dns_threat_detector_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "pre_update_dns_threat_detector", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest.pb( + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gcn_dns_threat_detector.DnsThreatDetector.to_json( + gcn_dns_threat_detector.DnsThreatDetector() + ) + req.return_value.content = return_value + + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcn_dns_threat_detector.DnsThreatDetector() + post_with_metadata.return_value = ( + gcn_dns_threat_detector.DnsThreatDetector(), + metadata, + ) + + client.update_dns_threat_detector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_dns_threat_detector_rest_bad_request( + request_type=dns_threat_detector.DeleteDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_dns_threat_detector(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.DeleteDnsThreatDetectorRequest, + dict, + ], +) +def test_delete_dns_threat_detector_rest_call_success(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_dns_threat_detector(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dns_threat_detector_rest_interceptors(null_interceptor): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DnsThreatDetectorServiceRestInterceptor(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "pre_delete_dns_threat_detector", + ) as pre: + pre.assert_not_called() + pb_message = dns_threat_detector.DeleteDnsThreatDetectorRequest.pb( + dns_threat_detector.DeleteDnsThreatDetectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dns_threat_detector.DeleteDnsThreatDetectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_dns_threat_detector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_dns_threat_detectors_empty_call_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + client.list_dns_threat_detectors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.ListDnsThreatDetectorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_dns_threat_detector_empty_call_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + client.get_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.GetDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_dns_threat_detector_empty_call_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + client.create_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_dns_threat_detector_empty_call_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + client.update_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_dns_threat_detector_empty_call_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + client.delete_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.DeleteDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DnsThreatDetectorServiceGrpcTransport, + ) + + +def test_dns_threat_detector_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DnsThreatDetectorServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_dns_threat_detector_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.network_security_v1alpha1.services.dns_threat_detector_service.transports.DnsThreatDetectorServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DnsThreatDetectorServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_dns_threat_detectors", + "get_dns_threat_detector", + "create_dns_threat_detector", + "update_dns_threat_detector", + "delete_dns_threat_detector", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_dns_threat_detector_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.network_security_v1alpha1.services.dns_threat_detector_service.transports.DnsThreatDetectorServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DnsThreatDetectorServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_dns_threat_detector_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.network_security_v1alpha1.services.dns_threat_detector_service.transports.DnsThreatDetectorServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DnsThreatDetectorServiceTransport() + adc.assert_called_once() + + +def test_dns_threat_detector_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DnsThreatDetectorServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + ], +) +def test_dns_threat_detector_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + transports.DnsThreatDetectorServiceRestTransport, + ], +) +def test_dns_threat_detector_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DnsThreatDetectorServiceGrpcTransport, grpc_helpers), + (transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_dns_threat_detector_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + ], +) +def test_dns_threat_detector_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_dns_threat_detector_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DnsThreatDetectorServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_dns_threat_detector_service_host_no_port(transport_name): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_dns_threat_detector_service_host_with_port(transport_name): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_dns_threat_detector_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DnsThreatDetectorServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DnsThreatDetectorServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_dns_threat_detectors._session + session2 = client2.transport.list_dns_threat_detectors._session + assert session1 != session2 + session1 = client1.transport.get_dns_threat_detector._session + session2 = client2.transport.get_dns_threat_detector._session + assert session1 != session2 + session1 = client1.transport.create_dns_threat_detector._session + session2 = client2.transport.create_dns_threat_detector._session + assert session1 != session2 + session1 = client1.transport.update_dns_threat_detector._session + session2 = client2.transport.update_dns_threat_detector._session + assert session1 != session2 + session1 = client1.transport.delete_dns_threat_detector._session + session2 = client2.transport.delete_dns_threat_detector._session + assert session1 != session2 + + +def test_dns_threat_detector_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.DnsThreatDetectorServiceGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_dns_threat_detector_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.DnsThreatDetectorServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.DnsThreatDetectorServiceGrpcTransport,
+ transports.DnsThreatDetectorServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_dns_threat_detector_service_transport_channel_mtls_with_client_cert_source(
+ transport_class,
+):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + ], +) +def test_dns_threat_detector_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_dns_threat_detector_path(): + project = "squid" + location = "clam" + dns_threat_detector = "whelk" + expected = "projects/{project}/locations/{location}/dnsThreatDetectors/{dns_threat_detector}".format( + project=project, + location=location, + dns_threat_detector=dns_threat_detector, + ) + actual = DnsThreatDetectorServiceClient.dns_threat_detector_path( + project, location, dns_threat_detector + ) + assert expected == actual + + +def test_parse_dns_threat_detector_path(): + expected = { + "project": "octopus", + "location": "oyster", + "dns_threat_detector": "nudibranch", + } + path = DnsThreatDetectorServiceClient.dns_threat_detector_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_dns_threat_detector_path(path) + assert expected == actual + + +def test_network_path(): + project = "cuttlefish" + network = "mussel" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = DnsThreatDetectorServiceClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "winkle", + "network": "nautilus", + } + path = DnsThreatDetectorServiceClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_network_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DnsThreatDetectorServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = DnsThreatDetectorServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DnsThreatDetectorServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DnsThreatDetectorServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = DnsThreatDetectorServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DnsThreatDetectorServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = DnsThreatDetectorServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = DnsThreatDetectorServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = DnsThreatDetectorServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DnsThreatDetectorServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = DnsThreatDetectorServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DnsThreatDetectorServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DnsThreatDetectorServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DnsThreatDetectorServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
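+    # list_locations and get_location come from the google.cloud.location Locations
+    # mixin; list_locations returns the raw ListLocationsResponse proto directly
+    # (no pager), which is what the assertion below checks.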
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
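+    # The client is expected to copy URI path fields (here, ``name``) into the
+    # ``x-goog-request-params`` metadata entry so the backend can route the request
+    # by resource; the assertion below checks for exactly that header.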
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = DnsThreatDetectorServiceAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = DnsThreatDetectorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DnsThreatDetectorServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = DnsThreatDetectorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = DnsThreatDetectorServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = DnsThreatDetectorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+    client = DnsThreatDetectorServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+def test_set_iam_policy_from_dict():
+    client = DnsThreatDetectorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
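+    # The *_from_dict variants pass the request as a plain dict; the client is
+    # expected to coerce it into an iam_policy_pb2.SetIamPolicyRequest before
+    # invoking the (mocked) stub.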
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
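+    # For the IAM mixin methods the routing field is ``resource`` rather than
+    # ``name``, so the expected header value below is ``resource=resource/value``.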
+ request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
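+        # TestIamPermissions echoes back the subset of requested permissions the
+        # caller actually holds; the fake response below models that shape.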
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
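+        # Exiting the client as a context manager should close the transport
+        # (the gRPC channel or the REST session), mirroring the
+        # test_transport_close_* cases above.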
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_firewall_activation.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_firewall_activation.py new file mode 100644 index 000000000000..4b71206e3fff --- /dev/null +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_firewall_activation.py @@ -0,0 +1,12735 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.firewall_activation import ( + FirewallActivationAsyncClient, + FirewallActivationClient, + pagers, + transports, +) +from google.cloud.network_security_v1alpha1.types import common, firewall_activation + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
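+# The substitution only happens when the real template points at localhost;
+# otherwise the client's own template (formatted with a UNIVERSE_DOMAIN) is
+# used unchanged, which the endpoint tests below rely on.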
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert FirewallActivationClient._get_default_mtls_endpoint(None) is None + assert ( + FirewallActivationClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + FirewallActivationClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + FirewallActivationClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + FirewallActivationClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + FirewallActivationClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert FirewallActivationClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert FirewallActivationClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert FirewallActivationClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + FirewallActivationClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert FirewallActivationClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert FirewallActivationClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert FirewallActivationClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + FirewallActivationClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert FirewallActivationClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert FirewallActivationClient._get_client_cert_source(None, False) is None + assert ( + FirewallActivationClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + FirewallActivationClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + 
"google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + FirewallActivationClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + FirewallActivationClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + FirewallActivationClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FirewallActivationClient), +) +@mock.patch.object( + FirewallActivationAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FirewallActivationAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = FirewallActivationClient._DEFAULT_UNIVERSE + default_endpoint = FirewallActivationClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = FirewallActivationClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + FirewallActivationClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + FirewallActivationClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == FirewallActivationClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FirewallActivationClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + FirewallActivationClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == FirewallActivationClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FirewallActivationClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == FirewallActivationClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FirewallActivationClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + FirewallActivationClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + FirewallActivationClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + FirewallActivationClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + FirewallActivationClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + FirewallActivationClient._get_universe_domain(None, None) + == FirewallActivationClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + FirewallActivationClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FirewallActivationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FirewallActivationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (FirewallActivationClient, "grpc"), + (FirewallActivationAsyncClient, "grpc_asyncio"), + (FirewallActivationClient, "rest"), + ], +) +def test_firewall_activation_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.FirewallActivationGrpcTransport, "grpc"), + (transports.FirewallActivationGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.FirewallActivationRestTransport, "rest"), + ], +) +def test_firewall_activation_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (FirewallActivationClient, "grpc"), + (FirewallActivationAsyncClient, "grpc_asyncio"), + (FirewallActivationClient, "rest"), + ], +) +def test_firewall_activation_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) 
as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +def test_firewall_activation_client_get_transport_class(): + transport = FirewallActivationClient.get_transport_class() + available_transports = [ + transports.FirewallActivationGrpcTransport, + transports.FirewallActivationRestTransport, + ] + assert transport in available_transports + + transport = FirewallActivationClient.get_transport_class("grpc") + assert transport == transports.FirewallActivationGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (FirewallActivationClient, transports.FirewallActivationGrpcTransport, "grpc"), + ( + FirewallActivationAsyncClient, + transports.FirewallActivationGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (FirewallActivationClient, transports.FirewallActivationRestTransport, "rest"), + ], +) +@mock.patch.object( + FirewallActivationClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FirewallActivationClient), +) +@mock.patch.object( + FirewallActivationAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FirewallActivationAsyncClient), +) +def test_firewall_activation_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(FirewallActivationClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(FirewallActivationClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
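+    # GOOGLE_API_USE_MTLS_ENDPOINT recognizes "never" (plain endpoint), "always"
+    # (mTLS endpoint), and "auto" (mTLS only when a client certificate is
+    # available); any other value raises MutualTLSChannelError, as checked below.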
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + FirewallActivationClient, + 
transports.FirewallActivationGrpcTransport, + "grpc", + "true", + ), + ( + FirewallActivationAsyncClient, + transports.FirewallActivationGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + FirewallActivationClient, + transports.FirewallActivationGrpcTransport, + "grpc", + "false", + ), + ( + FirewallActivationAsyncClient, + transports.FirewallActivationGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + FirewallActivationClient, + transports.FirewallActivationRestTransport, + "rest", + "true", + ), + ( + FirewallActivationClient, + transports.FirewallActivationRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + FirewallActivationClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FirewallActivationClient), +) +@mock.patch.object( + FirewallActivationAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FirewallActivationAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_firewall_activation_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
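+    # "ADC client cert" here means the default certificate source from
+    # google.auth.transport.mtls (has_default_client_cert_source /
+    # default_client_cert_source), which the block below patches.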
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [FirewallActivationClient, FirewallActivationAsyncClient] +) +@mock.patch.object( + FirewallActivationClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(FirewallActivationClient), +) +@mock.patch.object( + FirewallActivationAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(FirewallActivationAsyncClient), +) +def test_firewall_activation_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
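+ # (With "never", the regular endpoint is returned regardless of whether a client certificate is available.)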
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [FirewallActivationClient, FirewallActivationAsyncClient] +) +@mock.patch.object( + FirewallActivationClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FirewallActivationClient), +) +@mock.patch.object( + FirewallActivationAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FirewallActivationAsyncClient), +) +def test_firewall_activation_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = FirewallActivationClient._DEFAULT_UNIVERSE + default_endpoint = FirewallActivationClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = FirewallActivationClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
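+ # (An explicit api_endpoint override takes precedence over the mTLS autoswitch and the universe-domain template checked further below.)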
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (FirewallActivationClient, transports.FirewallActivationGrpcTransport, "grpc"), + ( + FirewallActivationAsyncClient, + transports.FirewallActivationGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (FirewallActivationClient, transports.FirewallActivationRestTransport, "rest"), + ], +) +def test_firewall_activation_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + FirewallActivationClient, + transports.FirewallActivationGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + FirewallActivationAsyncClient, + transports.FirewallActivationGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + FirewallActivationClient, + transports.FirewallActivationRestTransport, + "rest", + None, + ), + ], +) +def test_firewall_activation_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_firewall_activation_client_client_options_from_dict(): + with mock.patch( + "google.cloud.network_security_v1alpha1.services.firewall_activation.transports.FirewallActivationGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = FirewallActivationClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + FirewallActivationClient, + transports.FirewallActivationGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + FirewallActivationAsyncClient, + transports.FirewallActivationGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_firewall_activation_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
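+ # (The file path is forwarded to the transport and loaded via google.auth.load_credentials_from_file before the gRPC channel is created; both steps are asserted below.)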
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.ListFirewallEndpointsRequest, + dict, + ], +) +def test_list_firewall_endpoints(request_type, transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = firewall_activation.ListFirewallEndpointsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_firewall_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firewall_activation.ListFirewallEndpointsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFirewallEndpointsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_firewall_endpoints_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = firewall_activation.ListFirewallEndpointsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_firewall_endpoints(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firewall_activation.ListFirewallEndpointsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_firewall_endpoints_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_firewall_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_firewall_endpoints + ] = mock_rpc + request = {} + client.list_firewall_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_firewall_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_firewall_endpoints_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_firewall_endpoints + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_firewall_endpoints + ] = mock_rpc + + request = {} + await client.list_firewall_endpoints(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_firewall_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_firewall_endpoints_async( + transport: str = "grpc_asyncio", + request_type=firewall_activation.ListFirewallEndpointsRequest, +): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.ListFirewallEndpointsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_firewall_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firewall_activation.ListFirewallEndpointsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFirewallEndpointsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_firewall_endpoints_async_from_dict(): + await test_list_firewall_endpoints_async(request_type=dict) + + +def test_list_firewall_endpoints_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.ListFirewallEndpointsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + call.return_value = firewall_activation.ListFirewallEndpointsResponse() + client.list_firewall_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_firewall_endpoints_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.ListFirewallEndpointsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
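+ # (The routing header derived from request.parent must appear in the call metadata as ("x-goog-request-params", "parent=parent_value").)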
+ with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.ListFirewallEndpointsResponse() + ) + await client.list_firewall_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_firewall_endpoints_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = firewall_activation.ListFirewallEndpointsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_firewall_endpoints( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_firewall_endpoints_flattened_error(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_firewall_endpoints( + firewall_activation.ListFirewallEndpointsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_firewall_endpoints_flattened_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = firewall_activation.ListFirewallEndpointsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.ListFirewallEndpointsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_firewall_endpoints( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_firewall_endpoints_flattened_error_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_firewall_endpoints( + firewall_activation.ListFirewallEndpointsRequest(), + parent="parent_value", + ) + + +def test_list_firewall_endpoints_pager(transport_name: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + ], + next_page_token="abc", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[], + next_page_token="def", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + ], + next_page_token="ghi", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_firewall_endpoints(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, firewall_activation.FirewallEndpoint) for i in results) + + +def test_list_firewall_endpoints_pages(transport_name: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + ], + next_page_token="abc", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[], + next_page_token="def", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + ], + next_page_token="ghi", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + ], + ), + RuntimeError, + ) + pages = list(client.list_firewall_endpoints(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_firewall_endpoints_async_pager(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + ], + next_page_token="abc", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[], + next_page_token="def", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + ], + next_page_token="ghi", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_firewall_endpoints( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, firewall_activation.FirewallEndpoint) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_firewall_endpoints_async_pages(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + ], + next_page_token="abc", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[], + next_page_token="def", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + ], + next_page_token="ghi", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_firewall_endpoints(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.GetFirewallEndpointRequest, + dict, + ], +) +def test_get_firewall_endpoint(request_type, transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = firewall_activation.FirewallEndpoint( + name="name_value", + description="description_value", + state=firewall_activation.FirewallEndpoint.State.CREATING, + reconciling=True, + associated_networks=["associated_networks_value"], + satisfies_pzs=True, + satisfies_pzi=True, + billing_project_id="billing_project_id_value", + ) + response = client.get_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firewall_activation.GetFirewallEndpointRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firewall_activation.FirewallEndpoint) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == firewall_activation.FirewallEndpoint.State.CREATING + assert response.reconciling is True + assert response.associated_networks == ["associated_networks_value"] + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True + assert response.billing_project_id == "billing_project_id_value" + + +def test_get_firewall_endpoint_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firewall_activation.GetFirewallEndpointRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_firewall_endpoint(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firewall_activation.GetFirewallEndpointRequest( + name="name_value", + ) + + +def test_get_firewall_endpoint_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_firewall_endpoint + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_firewall_endpoint + ] = mock_rpc + request = {} + client.get_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_firewall_endpoint + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_firewall_endpoint + ] = mock_rpc + + request = {} + await client.get_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_async( + transport: str = "grpc_asyncio", + request_type=firewall_activation.GetFirewallEndpointRequest, +): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.FirewallEndpoint( + name="name_value", + description="description_value", + state=firewall_activation.FirewallEndpoint.State.CREATING, + reconciling=True, + associated_networks=["associated_networks_value"], + satisfies_pzs=True, + satisfies_pzi=True, + billing_project_id="billing_project_id_value", + ) + ) + response = await client.get_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firewall_activation.GetFirewallEndpointRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firewall_activation.FirewallEndpoint) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == firewall_activation.FirewallEndpoint.State.CREATING + assert response.reconciling is True + assert response.associated_networks == ["associated_networks_value"] + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True + assert response.billing_project_id == "billing_project_id_value" + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_async_from_dict(): + await test_get_firewall_endpoint_async(request_type=dict) + + +def test_get_firewall_endpoint_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.GetFirewallEndpointRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint), "__call__" + ) as call: + call.return_value = firewall_activation.FirewallEndpoint() + client.get_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.GetFirewallEndpointRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.FirewallEndpoint() + ) + await client.get_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_firewall_endpoint_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = firewall_activation.FirewallEndpoint() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_firewall_endpoint( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_firewall_endpoint_flattened_error(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_firewall_endpoint( + firewall_activation.GetFirewallEndpointRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_flattened_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = firewall_activation.FirewallEndpoint() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.FirewallEndpoint() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_firewall_endpoint( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_flattened_error_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_firewall_endpoint( + firewall_activation.GetFirewallEndpointRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.CreateFirewallEndpointRequest, + dict, + ], +) +def test_create_firewall_endpoint(request_type, transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firewall_activation.CreateFirewallEndpointRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_firewall_endpoint_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firewall_activation.CreateFirewallEndpointRequest( + parent="parent_value", + firewall_endpoint_id="firewall_endpoint_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_firewall_endpoint(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firewall_activation.CreateFirewallEndpointRequest( + parent="parent_value", + firewall_endpoint_id="firewall_endpoint_id_value", + request_id="request_id_value", + ) + + +def test_create_firewall_endpoint_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_firewall_endpoint + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_firewall_endpoint + ] = mock_rpc + request = {} + client.create_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
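+ # (Those extra wrap_method calls are expected, which is why the wrapper mock is reset below before the RPC is invoked a second time.)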
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_firewall_endpoint + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_firewall_endpoint + ] = mock_rpc + + request = {} + await client.create_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_async( + transport: str = "grpc_asyncio", + request_type=firewall_activation.CreateFirewallEndpointRequest, +): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firewall_activation.CreateFirewallEndpointRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_async_from_dict(): + await test_create_firewall_endpoint_async(request_type=dict) + + +def test_create_firewall_endpoint_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.CreateFirewallEndpointRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_firewall_endpoint), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.CreateFirewallEndpointRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_firewall_endpoint_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_firewall_endpoint( + parent="parent_value", + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + firewall_endpoint_id="firewall_endpoint_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].firewall_endpoint + mock_val = firewall_activation.FirewallEndpoint(name="name_value") + assert arg == mock_val + arg = args[0].firewall_endpoint_id + mock_val = "firewall_endpoint_id_value" + assert arg == mock_val + + +def test_create_firewall_endpoint_flattened_error(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_firewall_endpoint( + firewall_activation.CreateFirewallEndpointRequest(), + parent="parent_value", + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + firewall_endpoint_id="firewall_endpoint_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_flattened_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_firewall_endpoint( + parent="parent_value", + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + firewall_endpoint_id="firewall_endpoint_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].firewall_endpoint + mock_val = firewall_activation.FirewallEndpoint(name="name_value") + assert arg == mock_val + arg = args[0].firewall_endpoint_id + mock_val = "firewall_endpoint_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_flattened_error_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_firewall_endpoint( + firewall_activation.CreateFirewallEndpointRequest(), + parent="parent_value", + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + firewall_endpoint_id="firewall_endpoint_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.DeleteFirewallEndpointRequest, + dict, + ], +) +def test_delete_firewall_endpoint(request_type, transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firewall_activation.DeleteFirewallEndpointRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
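+ # (LRO methods return a google.api_core operation future; a caller would typically block on response.result(), but this test only verifies the type.)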
+ assert isinstance(response, future.Future) + + +def test_delete_firewall_endpoint_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firewall_activation.DeleteFirewallEndpointRequest( + name="name_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_firewall_endpoint(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firewall_activation.DeleteFirewallEndpointRequest( + name="name_value", + request_id="request_id_value", + ) + + +def test_delete_firewall_endpoint_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_firewall_endpoint + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_firewall_endpoint + ] = mock_rpc + request = {} + client.delete_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_firewall_endpoint + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_firewall_endpoint + ] = mock_rpc + + request = {} + await client.delete_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_async( + transport: str = "grpc_asyncio", + request_type=firewall_activation.DeleteFirewallEndpointRequest, +): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firewall_activation.DeleteFirewallEndpointRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_async_from_dict(): + await test_delete_firewall_endpoint_async(request_type=dict) + + +def test_delete_firewall_endpoint_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.DeleteFirewallEndpointRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_firewall_endpoint), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.DeleteFirewallEndpointRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_firewall_endpoint_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_firewall_endpoint( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_firewall_endpoint_flattened_error(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_firewall_endpoint( + firewall_activation.DeleteFirewallEndpointRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_flattened_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_firewall_endpoint( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_flattened_error_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_firewall_endpoint( + firewall_activation.DeleteFirewallEndpointRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.UpdateFirewallEndpointRequest, + dict, + ], +) +def test_update_firewall_endpoint(request_type, transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firewall_activation.UpdateFirewallEndpointRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_firewall_endpoint_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firewall_activation.UpdateFirewallEndpointRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_firewall_endpoint(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firewall_activation.UpdateFirewallEndpointRequest( + request_id="request_id_value", + ) + + +def test_update_firewall_endpoint_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_firewall_endpoint + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_firewall_endpoint + ] = mock_rpc + request = {} + client.update_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_firewall_endpoint + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_firewall_endpoint + ] = mock_rpc + + request = {} + await client.update_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_async( + transport: str = "grpc_asyncio", + request_type=firewall_activation.UpdateFirewallEndpointRequest, +): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firewall_activation.UpdateFirewallEndpointRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_async_from_dict(): + await test_update_firewall_endpoint_async(request_type=dict) + + +def test_update_firewall_endpoint_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.UpdateFirewallEndpointRequest() + + request.firewall_endpoint.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "firewall_endpoint.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.UpdateFirewallEndpointRequest() + + request.firewall_endpoint.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. 
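+    # (Only the presence of at least one recorded call is asserted here; the
+    # synchronous variant of this test checks the exact call count.)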
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "firewall_endpoint.name=name_value", + ) in kw["metadata"] + + +def test_update_firewall_endpoint_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_firewall_endpoint( + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].firewall_endpoint + mock_val = firewall_activation.FirewallEndpoint(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_firewall_endpoint_flattened_error(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_firewall_endpoint( + firewall_activation.UpdateFirewallEndpointRequest(), + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_flattened_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_firewall_endpoint( + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].firewall_endpoint + mock_val = firewall_activation.FirewallEndpoint(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_flattened_error_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_firewall_endpoint( + firewall_activation.UpdateFirewallEndpointRequest(), + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.ListFirewallEndpointAssociationsRequest, + dict, + ], +) +def test_list_firewall_endpoint_associations(request_type, transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + firewall_activation.ListFirewallEndpointAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = client.list_firewall_endpoint_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firewall_activation.ListFirewallEndpointAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFirewallEndpointAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_firewall_endpoint_associations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firewall_activation.ListFirewallEndpointAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_firewall_endpoint_associations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firewall_activation.ListFirewallEndpointAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_firewall_endpoint_associations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_firewall_endpoint_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_firewall_endpoint_associations + ] = mock_rpc + request = {} + client.list_firewall_endpoint_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_firewall_endpoint_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_firewall_endpoint_associations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_firewall_endpoint_associations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_firewall_endpoint_associations + ] = mock_rpc + + request = {} + await client.list_firewall_endpoint_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_firewall_endpoint_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_firewall_endpoint_associations_async( + transport: str = "grpc_asyncio", + request_type=firewall_activation.ListFirewallEndpointAssociationsRequest, +): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.ListFirewallEndpointAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_firewall_endpoint_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firewall_activation.ListFirewallEndpointAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFirewallEndpointAssociationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_firewall_endpoint_associations_async_from_dict(): + await test_list_firewall_endpoint_associations_async(request_type=dict) + + +def test_list_firewall_endpoint_associations_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.ListFirewallEndpointAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + call.return_value = ( + firewall_activation.ListFirewallEndpointAssociationsResponse() + ) + client.list_firewall_endpoint_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_firewall_endpoint_associations_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.ListFirewallEndpointAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.ListFirewallEndpointAssociationsResponse() + ) + await client.list_firewall_endpoint_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_firewall_endpoint_associations_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + firewall_activation.ListFirewallEndpointAssociationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_firewall_endpoint_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_firewall_endpoint_associations_flattened_error(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_firewall_endpoint_associations( + firewall_activation.ListFirewallEndpointAssociationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_firewall_endpoint_associations_flattened_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + firewall_activation.ListFirewallEndpointAssociationsResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.ListFirewallEndpointAssociationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_firewall_endpoint_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_firewall_endpoint_associations_flattened_error_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_firewall_endpoint_associations( + firewall_activation.ListFirewallEndpointAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_firewall_endpoint_associations_pager(transport_name: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + # Set the response to a series of pages. 
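+        # Each call on the mocked RPC returns the next entry in ``side_effect``;
+        # the trailing RuntimeError makes the test fail loudly if the pager
+        # tries to fetch a page past the last response.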
+ call.side_effect = ( + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + ], + next_page_token="abc", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[], + next_page_token="def", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + ], + next_page_token="ghi", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_firewall_endpoint_associations( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, firewall_activation.FirewallEndpointAssociation) + for i in results + ) + + +def test_list_firewall_endpoint_associations_pages(transport_name: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + ], + next_page_token="abc", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[], + next_page_token="def", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + ], + next_page_token="ghi", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_firewall_endpoint_associations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_firewall_endpoint_associations_async_pager(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + ], + next_page_token="abc", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[], + next_page_token="def", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + ], + next_page_token="ghi", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_firewall_endpoint_associations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, firewall_activation.FirewallEndpointAssociation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_firewall_endpoint_associations_async_pages(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + ], + next_page_token="abc", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[], + next_page_token="def", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + ], + next_page_token="ghi", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_firewall_endpoint_associations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.GetFirewallEndpointAssociationRequest, + dict, + ], +) +def test_get_firewall_endpoint_association(request_type, transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = firewall_activation.FirewallEndpointAssociation( + name="name_value", + state=firewall_activation.FirewallEndpointAssociation.State.CREATING, + network="network_value", + firewall_endpoint="firewall_endpoint_value", + tls_inspection_policy="tls_inspection_policy_value", + reconciling=True, + disabled=True, + ) + response = client.get_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firewall_activation.GetFirewallEndpointAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, firewall_activation.FirewallEndpointAssociation) + assert response.name == "name_value" + assert ( + response.state == firewall_activation.FirewallEndpointAssociation.State.CREATING + ) + assert response.network == "network_value" + assert response.firewall_endpoint == "firewall_endpoint_value" + assert response.tls_inspection_policy == "tls_inspection_policy_value" + assert response.reconciling is True + assert response.disabled is True + + +def test_get_firewall_endpoint_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firewall_activation.GetFirewallEndpointAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_firewall_endpoint_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firewall_activation.GetFirewallEndpointAssociationRequest( + name="name_value", + ) + + +def test_get_firewall_endpoint_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_firewall_endpoint_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_firewall_endpoint_association + ] = mock_rpc + request = {} + client.get_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_firewall_endpoint_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_firewall_endpoint_association + ] = mock_rpc + + request = {} + await client.get_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_association_async( + transport: str = "grpc_asyncio", + request_type=firewall_activation.GetFirewallEndpointAssociationRequest, +): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.FirewallEndpointAssociation( + name="name_value", + state=firewall_activation.FirewallEndpointAssociation.State.CREATING, + network="network_value", + firewall_endpoint="firewall_endpoint_value", + tls_inspection_policy="tls_inspection_policy_value", + reconciling=True, + disabled=True, + ) + ) + response = await client.get_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firewall_activation.GetFirewallEndpointAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firewall_activation.FirewallEndpointAssociation) + assert response.name == "name_value" + assert ( + response.state == firewall_activation.FirewallEndpointAssociation.State.CREATING + ) + assert response.network == "network_value" + assert response.firewall_endpoint == "firewall_endpoint_value" + assert response.tls_inspection_policy == "tls_inspection_policy_value" + assert response.reconciling is True + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_association_async_from_dict(): + await test_get_firewall_endpoint_association_async(request_type=dict) + + +def test_get_firewall_endpoint_association_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.GetFirewallEndpointAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = firewall_activation.FirewallEndpointAssociation() + client.get_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_association_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.GetFirewallEndpointAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.FirewallEndpointAssociation() + ) + await client.get_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_firewall_endpoint_association_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = firewall_activation.FirewallEndpointAssociation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_firewall_endpoint_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
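+        # (The flattened keyword arguments are folded into a single request
+        # message, whose fields are verified individually below.)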
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_firewall_endpoint_association_flattened_error(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_firewall_endpoint_association( + firewall_activation.GetFirewallEndpointAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_association_flattened_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = firewall_activation.FirewallEndpointAssociation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.FirewallEndpointAssociation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_firewall_endpoint_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_firewall_endpoint_association_flattened_error_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_firewall_endpoint_association( + firewall_activation.GetFirewallEndpointAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.CreateFirewallEndpointAssociationRequest, + dict, + ], +) +def test_create_firewall_endpoint_association(request_type, transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firewall_activation.CreateFirewallEndpointAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_firewall_endpoint_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
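+    # (Per AIP-4235, fields such as ``request_id`` that are annotated for
+    # auto-population receive a UUID4 value when the caller leaves them unset.)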
+ client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firewall_activation.CreateFirewallEndpointAssociationRequest( + parent="parent_value", + firewall_endpoint_association_id="firewall_endpoint_association_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_firewall_endpoint_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firewall_activation.CreateFirewallEndpointAssociationRequest( + parent="parent_value", + firewall_endpoint_association_id="firewall_endpoint_association_id_value", + request_id="request_id_value", + ) + + +def test_create_firewall_endpoint_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_firewall_endpoint_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_firewall_endpoint_association + ] = mock_rpc + request = {} + client.create_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_firewall_endpoint_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_firewall_endpoint_association + ] = mock_rpc + + request = {} + await client.create_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_association_async( + transport: str = "grpc_asyncio", + request_type=firewall_activation.CreateFirewallEndpointAssociationRequest, +): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firewall_activation.CreateFirewallEndpointAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_association_async_from_dict(): + await test_create_firewall_endpoint_association_async(request_type=dict) + + +def test_create_firewall_endpoint_association_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firewall_activation.CreateFirewallEndpointAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_association_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.CreateFirewallEndpointAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_firewall_endpoint_association_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_firewall_endpoint_association( + parent="parent_value", + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + firewall_endpoint_association_id="firewall_endpoint_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].firewall_endpoint_association + mock_val = firewall_activation.FirewallEndpointAssociation(name="name_value") + assert arg == mock_val + arg = args[0].firewall_endpoint_association_id + mock_val = "firewall_endpoint_association_id_value" + assert arg == mock_val + + +def test_create_firewall_endpoint_association_flattened_error(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_firewall_endpoint_association( + firewall_activation.CreateFirewallEndpointAssociationRequest(), + parent="parent_value", + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + firewall_endpoint_association_id="firewall_endpoint_association_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_association_flattened_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_firewall_endpoint_association( + parent="parent_value", + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + firewall_endpoint_association_id="firewall_endpoint_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].firewall_endpoint_association + mock_val = firewall_activation.FirewallEndpointAssociation(name="name_value") + assert arg == mock_val + arg = args[0].firewall_endpoint_association_id + mock_val = "firewall_endpoint_association_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_firewall_endpoint_association_flattened_error_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_firewall_endpoint_association( + firewall_activation.CreateFirewallEndpointAssociationRequest(), + parent="parent_value", + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + firewall_endpoint_association_id="firewall_endpoint_association_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.DeleteFirewallEndpointAssociationRequest, + dict, + ], +) +def test_delete_firewall_endpoint_association(request_type, transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firewall_activation.DeleteFirewallEndpointAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_firewall_endpoint_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firewall_activation.DeleteFirewallEndpointAssociationRequest( + name="name_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_firewall_endpoint_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firewall_activation.DeleteFirewallEndpointAssociationRequest( + name="name_value", + request_id="request_id_value", + ) + + +def test_delete_firewall_endpoint_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_firewall_endpoint_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_firewall_endpoint_association + ] = mock_rpc + request = {} + client.delete_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_firewall_endpoint_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_firewall_endpoint_association + ] = mock_rpc + + request = {} + await client.delete_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_association_async( + transport: str = "grpc_asyncio", + request_type=firewall_activation.DeleteFirewallEndpointAssociationRequest, +): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firewall_activation.DeleteFirewallEndpointAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_association_async_from_dict(): + await test_delete_firewall_endpoint_association_async(request_type=dict) + + +def test_delete_firewall_endpoint_association_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = firewall_activation.DeleteFirewallEndpointAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_association_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.DeleteFirewallEndpointAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_firewall_endpoint_association_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_firewall_endpoint_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_firewall_endpoint_association_flattened_error(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_firewall_endpoint_association( + firewall_activation.DeleteFirewallEndpointAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_association_flattened_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_firewall_endpoint_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_association_flattened_error_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_firewall_endpoint_association( + firewall_activation.DeleteFirewallEndpointAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.UpdateFirewallEndpointAssociationRequest, + dict, + ], +) +def test_update_firewall_endpoint_association(request_type, transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firewall_activation.UpdateFirewallEndpointAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_firewall_endpoint_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firewall_activation.UpdateFirewallEndpointAssociationRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_firewall_endpoint_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firewall_activation.UpdateFirewallEndpointAssociationRequest( + request_id="request_id_value", + ) + + +def test_update_firewall_endpoint_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_firewall_endpoint_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_firewall_endpoint_association + ] = mock_rpc + request = {} + client.update_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_firewall_endpoint_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_firewall_endpoint_association + ] = mock_rpc + + request = {} + await client.update_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_association_async( + transport: str = "grpc_asyncio", + request_type=firewall_activation.UpdateFirewallEndpointAssociationRequest, +): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firewall_activation.UpdateFirewallEndpointAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_association_async_from_dict(): + await test_update_firewall_endpoint_association_async(request_type=dict) + + +def test_update_firewall_endpoint_association_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.UpdateFirewallEndpointAssociationRequest() + + request.firewall_endpoint_association.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "firewall_endpoint_association.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_association_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = firewall_activation.UpdateFirewallEndpointAssociationRequest() + + request.firewall_endpoint_association.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "firewall_endpoint_association.name=name_value", + ) in kw["metadata"] + + +def test_update_firewall_endpoint_association_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_firewall_endpoint_association( + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].firewall_endpoint_association + mock_val = firewall_activation.FirewallEndpointAssociation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_firewall_endpoint_association_flattened_error(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_firewall_endpoint_association( + firewall_activation.UpdateFirewallEndpointAssociationRequest(), + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_association_flattened_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_firewall_endpoint_association( + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].firewall_endpoint_association + mock_val = firewall_activation.FirewallEndpointAssociation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_firewall_endpoint_association_flattened_error_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_firewall_endpoint_association( + firewall_activation.UpdateFirewallEndpointAssociationRequest(), + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_list_firewall_endpoints_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_firewall_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_firewall_endpoints + ] = mock_rpc + + request = {} + client.list_firewall_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_firewall_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_firewall_endpoints_rest_required_fields( + request_type=firewall_activation.ListFirewallEndpointsRequest, +): + transport_class = transports.FirewallActivationRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_firewall_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_firewall_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firewall_activation.ListFirewallEndpointsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firewall_activation.ListFirewallEndpointsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_firewall_endpoints(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_firewall_endpoints_rest_unset_required_fields(): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_firewall_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_firewall_endpoints_rest_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = firewall_activation.ListFirewallEndpointsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firewall_activation.ListFirewallEndpointsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_firewall_endpoints(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=organizations/*/locations/*}/firewallEndpoints" + % client.transport._host, + args[1], + ) + + +def test_list_firewall_endpoints_rest_flattened_error(transport: str = "rest"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_firewall_endpoints( + firewall_activation.ListFirewallEndpointsRequest(), + parent="parent_value", + ) + + +def test_list_firewall_endpoints_rest_pager(transport: str = "rest"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + ], + next_page_token="abc", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[], + next_page_token="def", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + ], + next_page_token="ghi", + ), + firewall_activation.ListFirewallEndpointsResponse( + firewall_endpoints=[ + firewall_activation.FirewallEndpoint(), + firewall_activation.FirewallEndpoint(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + firewall_activation.ListFirewallEndpointsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_firewall_endpoints(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, firewall_activation.FirewallEndpoint) for i in results) + + pages = list(client.list_firewall_endpoints(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_firewall_endpoint_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_firewall_endpoint + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_firewall_endpoint + ] = mock_rpc + + request = {} + client.get_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_firewall_endpoint_rest_required_fields( + request_type=firewall_activation.GetFirewallEndpointRequest, +): + transport_class = transports.FirewallActivationRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_firewall_endpoint._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_firewall_endpoint._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firewall_activation.FirewallEndpoint() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firewall_activation.FirewallEndpoint.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_firewall_endpoint(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_firewall_endpoint_rest_unset_required_fields(): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_firewall_endpoint._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_firewall_endpoint_rest_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firewall_activation.FirewallEndpoint() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/firewallEndpoints/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firewall_activation.FirewallEndpoint.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_firewall_endpoint(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=organizations/*/locations/*/firewallEndpoints/*}" + % client.transport._host, + args[1], + ) + + +def test_get_firewall_endpoint_rest_flattened_error(transport: str = "rest"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_firewall_endpoint( + firewall_activation.GetFirewallEndpointRequest(), + name="name_value", + ) + + +def test_create_firewall_endpoint_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_firewall_endpoint + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_firewall_endpoint + ] = mock_rpc + + request = {} + client.create_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_firewall_endpoint_rest_required_fields( + request_type=firewall_activation.CreateFirewallEndpointRequest, +): + transport_class = transports.FirewallActivationRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["firewall_endpoint_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "firewallEndpointId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_firewall_endpoint._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "firewallEndpointId" in jsonified_request + assert ( + jsonified_request["firewallEndpointId"] == request_init["firewall_endpoint_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["firewallEndpointId"] = "firewall_endpoint_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_firewall_endpoint._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "firewall_endpoint_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "firewallEndpointId" in jsonified_request + assert jsonified_request["firewallEndpointId"] == "firewall_endpoint_id_value" + + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_firewall_endpoint(request) + + expected_params = [ + ( + "firewallEndpointId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_firewall_endpoint_rest_unset_required_fields(): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_firewall_endpoint._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "firewallEndpointId", + "requestId", + ) + ) + & set( + ( + "parent", + "firewallEndpointId", + "firewallEndpoint", + ) + ) + ) + + +def test_create_firewall_endpoint_rest_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + firewall_endpoint_id="firewall_endpoint_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_firewall_endpoint(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=organizations/*/locations/*}/firewallEndpoints" + % client.transport._host, + args[1], + ) + + +def test_create_firewall_endpoint_rest_flattened_error(transport: str = "rest"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_firewall_endpoint( + firewall_activation.CreateFirewallEndpointRequest(), + parent="parent_value", + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + firewall_endpoint_id="firewall_endpoint_id_value", + ) + + +def test_delete_firewall_endpoint_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_firewall_endpoint + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_firewall_endpoint + ] = mock_rpc + + request = {} + client.delete_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_firewall_endpoint_rest_required_fields( + request_type=firewall_activation.DeleteFirewallEndpointRequest, +): + transport_class = transports.FirewallActivationRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_firewall_endpoint._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_firewall_endpoint._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_firewall_endpoint(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_firewall_endpoint_rest_unset_required_fields(): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_firewall_endpoint._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_firewall_endpoint_rest_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/firewallEndpoints/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_firewall_endpoint(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=organizations/*/locations/*/firewallEndpoints/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_firewall_endpoint_rest_flattened_error(transport: str = "rest"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_firewall_endpoint( + firewall_activation.DeleteFirewallEndpointRequest(), + name="name_value", + ) + + +def test_update_firewall_endpoint_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_firewall_endpoint + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_firewall_endpoint + ] = mock_rpc + + request = {} + client.update_firewall_endpoint(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_firewall_endpoint(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_firewall_endpoint_rest_required_fields( + request_type=firewall_activation.UpdateFirewallEndpointRequest, +): + transport_class = transports.FirewallActivationRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_firewall_endpoint._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_firewall_endpoint._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_firewall_endpoint(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_firewall_endpoint_rest_unset_required_fields(): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_firewall_endpoint._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "firewallEndpoint", + ) + ) + ) + + +def test_update_firewall_endpoint_rest_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "firewall_endpoint": { + "name": "organizations/sample1/locations/sample2/firewallEndpoints/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_firewall_endpoint(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{firewall_endpoint.name=organizations/*/locations/*/firewallEndpoints/*}" + % client.transport._host, + args[1], + ) + + +def test_update_firewall_endpoint_rest_flattened_error(transport: str = "rest"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_firewall_endpoint( + firewall_activation.UpdateFirewallEndpointRequest(), + firewall_endpoint=firewall_activation.FirewallEndpoint(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_list_firewall_endpoint_associations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_firewall_endpoint_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_firewall_endpoint_associations + ] = mock_rpc + + request = {} + client.list_firewall_endpoint_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_firewall_endpoint_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_firewall_endpoint_associations_rest_required_fields( + request_type=firewall_activation.ListFirewallEndpointAssociationsRequest, +): + transport_class = transports.FirewallActivationRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_firewall_endpoint_associations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_firewall_endpoint_associations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firewall_activation.ListFirewallEndpointAssociationsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + firewall_activation.ListFirewallEndpointAssociationsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_firewall_endpoint_associations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_firewall_endpoint_associations_rest_unset_required_fields(): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_firewall_endpoint_associations._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_firewall_endpoint_associations_rest_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firewall_activation.ListFirewallEndpointAssociationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firewall_activation.ListFirewallEndpointAssociationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_firewall_endpoint_associations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/firewallEndpointAssociations" + % client.transport._host, + args[1], + ) + + +def test_list_firewall_endpoint_associations_rest_flattened_error( + transport: str = "rest", +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_firewall_endpoint_associations( + firewall_activation.ListFirewallEndpointAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_firewall_endpoint_associations_rest_pager(transport: str = "rest"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + ], + next_page_token="abc", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[], + next_page_token="def", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + ], + next_page_token="ghi", + ), + firewall_activation.ListFirewallEndpointAssociationsResponse( + firewall_endpoint_associations=[ + firewall_activation.FirewallEndpointAssociation(), + firewall_activation.FirewallEndpointAssociation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + firewall_activation.ListFirewallEndpointAssociationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_firewall_endpoint_associations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, firewall_activation.FirewallEndpointAssociation) + for i in results + ) + + pages = list( + client.list_firewall_endpoint_associations(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_firewall_endpoint_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + 
+ # Ensure method has been cached + assert ( + client._transport.get_firewall_endpoint_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_firewall_endpoint_association + ] = mock_rpc + + request = {} + client.get_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_firewall_endpoint_association_rest_required_fields( + request_type=firewall_activation.GetFirewallEndpointAssociationRequest, +): + transport_class = transports.FirewallActivationRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_firewall_endpoint_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_firewall_endpoint_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firewall_activation.FirewallEndpointAssociation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firewall_activation.FirewallEndpointAssociation.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_firewall_endpoint_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_firewall_endpoint_association_rest_unset_required_fields(): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.get_firewall_endpoint_association._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_firewall_endpoint_association_rest_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firewall_activation.FirewallEndpointAssociation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/firewallEndpointAssociations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = firewall_activation.FirewallEndpointAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_firewall_endpoint_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/firewallEndpointAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_firewall_endpoint_association_rest_flattened_error( + transport: str = "rest", +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_firewall_endpoint_association( + firewall_activation.GetFirewallEndpointAssociationRequest(), + name="name_value", + ) + + +def test_create_firewall_endpoint_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_firewall_endpoint_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_firewall_endpoint_association + ] = mock_rpc + + request = {} + client.create_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_firewall_endpoint_association_rest_required_fields( + request_type=firewall_activation.CreateFirewallEndpointAssociationRequest, +): + transport_class = transports.FirewallActivationRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_firewall_endpoint_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_firewall_endpoint_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "firewall_endpoint_association_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_firewall_endpoint_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_firewall_endpoint_association_rest_unset_required_fields(): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.create_firewall_endpoint_association._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "firewallEndpointAssociationId", + "requestId", + ) + ) + & set( + ( + "parent", + "firewallEndpointAssociation", + ) + ) + ) + + +def test_create_firewall_endpoint_association_rest_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + firewall_endpoint_association_id="firewall_endpoint_association_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_firewall_endpoint_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/firewallEndpointAssociations" + % client.transport._host, + args[1], + ) + + +def test_create_firewall_endpoint_association_rest_flattened_error( + transport: str = "rest", +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_firewall_endpoint_association( + firewall_activation.CreateFirewallEndpointAssociationRequest(), + parent="parent_value", + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + firewall_endpoint_association_id="firewall_endpoint_association_id_value", + ) + + +def test_delete_firewall_endpoint_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_firewall_endpoint_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_firewall_endpoint_association + ] = mock_rpc + + request = {} + client.delete_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_firewall_endpoint_association_rest_required_fields( + request_type=firewall_activation.DeleteFirewallEndpointAssociationRequest, +): + transport_class = transports.FirewallActivationRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_firewall_endpoint_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_firewall_endpoint_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_firewall_endpoint_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_firewall_endpoint_association_rest_unset_required_fields(): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.delete_firewall_endpoint_association._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_firewall_endpoint_association_rest_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/firewallEndpointAssociations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_firewall_endpoint_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/firewallEndpointAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_firewall_endpoint_association_rest_flattened_error( + transport: str = "rest", +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_firewall_endpoint_association( + firewall_activation.DeleteFirewallEndpointAssociationRequest(), + name="name_value", + ) + + +def test_update_firewall_endpoint_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_firewall_endpoint_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_firewall_endpoint_association + ] = mock_rpc + + request = {} + client.update_firewall_endpoint_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_firewall_endpoint_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_firewall_endpoint_association_rest_required_fields( + request_type=firewall_activation.UpdateFirewallEndpointAssociationRequest, +): + transport_class = transports.FirewallActivationRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_firewall_endpoint_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_firewall_endpoint_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_firewall_endpoint_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_firewall_endpoint_association_rest_unset_required_fields(): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.update_firewall_endpoint_association._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "firewallEndpointAssociation", + ) + ) + ) + + +def test_update_firewall_endpoint_association_rest_flattened(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "firewall_endpoint_association": { + "name": "projects/sample1/locations/sample2/firewallEndpointAssociations/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_firewall_endpoint_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{firewall_endpoint_association.name=projects/*/locations/*/firewallEndpointAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_firewall_endpoint_association_rest_flattened_error( + transport: str = "rest", +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_firewall_endpoint_association( + firewall_activation.UpdateFirewallEndpointAssociationRequest(), + firewall_endpoint_association=firewall_activation.FirewallEndpointAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.FirewallActivationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.FirewallActivationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirewallActivationClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.FirewallActivationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirewallActivationClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirewallActivationClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.FirewallActivationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirewallActivationClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.FirewallActivationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = FirewallActivationClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.FirewallActivationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.FirewallActivationGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirewallActivationGrpcTransport, + transports.FirewallActivationGrpcAsyncIOTransport, + transports.FirewallActivationRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = FirewallActivationClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_firewall_endpoints_empty_call_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + call.return_value = firewall_activation.ListFirewallEndpointsResponse() + client.list_firewall_endpoints(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.ListFirewallEndpointsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_firewall_endpoint_empty_call_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint), "__call__" + ) as call: + call.return_value = firewall_activation.FirewallEndpoint() + client.get_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.GetFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_create_firewall_endpoint_empty_call_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.CreateFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_firewall_endpoint_empty_call_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.DeleteFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_firewall_endpoint_empty_call_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.UpdateFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_firewall_endpoint_associations_empty_call_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + call.return_value = ( + firewall_activation.ListFirewallEndpointAssociationsResponse() + ) + client.list_firewall_endpoint_associations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.ListFirewallEndpointAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_firewall_endpoint_association_empty_call_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = firewall_activation.FirewallEndpointAssociation() + client.get_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.GetFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_firewall_endpoint_association_empty_call_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.CreateFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_firewall_endpoint_association_empty_call_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.DeleteFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_firewall_endpoint_association_empty_call_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.UpdateFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = FirewallActivationAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_firewall_endpoints_empty_call_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.ListFirewallEndpointsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_firewall_endpoints(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.ListFirewallEndpointsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_firewall_endpoint_empty_call_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.FirewallEndpoint( + name="name_value", + description="description_value", + state=firewall_activation.FirewallEndpoint.State.CREATING, + reconciling=True, + associated_networks=["associated_networks_value"], + satisfies_pzs=True, + satisfies_pzi=True, + billing_project_id="billing_project_id_value", + ) + ) + await client.get_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.GetFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_firewall_endpoint_empty_call_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.CreateFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_empty_call_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.DeleteFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_firewall_endpoint_empty_call_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.UpdateFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_firewall_endpoint_associations_empty_call_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.ListFirewallEndpointAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_firewall_endpoint_associations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.ListFirewallEndpointAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_firewall_endpoint_association_empty_call_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + firewall_activation.FirewallEndpointAssociation( + name="name_value", + state=firewall_activation.FirewallEndpointAssociation.State.CREATING, + network="network_value", + firewall_endpoint="firewall_endpoint_value", + tls_inspection_policy="tls_inspection_policy_value", + reconciling=True, + disabled=True, + ) + ) + await client.get_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.GetFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_firewall_endpoint_association_empty_call_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.CreateFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_firewall_endpoint_association_empty_call_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.DeleteFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_firewall_endpoint_association_empty_call_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.UpdateFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = FirewallActivationClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_firewall_endpoints_rest_bad_request( + request_type=firewall_activation.ListFirewallEndpointsRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_firewall_endpoints(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.ListFirewallEndpointsRequest, + dict, + ], +) +def test_list_firewall_endpoints_rest_call_success(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firewall_activation.ListFirewallEndpointsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firewall_activation.ListFirewallEndpointsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_firewall_endpoints(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListFirewallEndpointsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_firewall_endpoints_rest_interceptors(null_interceptor): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirewallActivationRestInterceptor(), + ) + client = FirewallActivationClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirewallActivationRestInterceptor, "post_list_firewall_endpoints" + ) as post, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_list_firewall_endpoints_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.FirewallActivationRestInterceptor, "pre_list_firewall_endpoints" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firewall_activation.ListFirewallEndpointsRequest.pb( + firewall_activation.ListFirewallEndpointsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firewall_activation.ListFirewallEndpointsResponse.to_json( + firewall_activation.ListFirewallEndpointsResponse() + ) + req.return_value.content = return_value + + request = firewall_activation.ListFirewallEndpointsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firewall_activation.ListFirewallEndpointsResponse() + post_with_metadata.return_value = ( + firewall_activation.ListFirewallEndpointsResponse(), + metadata, + ) + + client.list_firewall_endpoints( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_firewall_endpoint_rest_bad_request( + request_type=firewall_activation.GetFirewallEndpointRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/firewallEndpoints/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_firewall_endpoint(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.GetFirewallEndpointRequest, + dict, + ], +) +def test_get_firewall_endpoint_rest_call_success(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/firewallEndpoints/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firewall_activation.FirewallEndpoint( + name="name_value", + description="description_value", + state=firewall_activation.FirewallEndpoint.State.CREATING, + reconciling=True, + associated_networks=["associated_networks_value"], + satisfies_pzs=True, + satisfies_pzi=True, + billing_project_id="billing_project_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firewall_activation.FirewallEndpoint.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_firewall_endpoint(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firewall_activation.FirewallEndpoint) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == firewall_activation.FirewallEndpoint.State.CREATING + assert response.reconciling is True + assert response.associated_networks == ["associated_networks_value"] + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True + assert response.billing_project_id == "billing_project_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_firewall_endpoint_rest_interceptors(null_interceptor): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirewallActivationRestInterceptor(), + ) + client = FirewallActivationClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirewallActivationRestInterceptor, "post_get_firewall_endpoint" + ) as post, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_get_firewall_endpoint_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.FirewallActivationRestInterceptor, "pre_get_firewall_endpoint" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firewall_activation.GetFirewallEndpointRequest.pb( + firewall_activation.GetFirewallEndpointRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firewall_activation.FirewallEndpoint.to_json( + firewall_activation.FirewallEndpoint() + ) + req.return_value.content = return_value + + request = firewall_activation.GetFirewallEndpointRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firewall_activation.FirewallEndpoint() + post_with_metadata.return_value = ( + firewall_activation.FirewallEndpoint(), + metadata, + ) + + client.get_firewall_endpoint( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_firewall_endpoint_rest_bad_request( + request_type=firewall_activation.CreateFirewallEndpointRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_firewall_endpoint(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.CreateFirewallEndpointRequest, + dict, + ], +) +def test_create_firewall_endpoint_rest_call_success(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init["firewall_endpoint"] = { + "name": "name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "reconciling": True, + "associated_networks": [ + "associated_networks_value1", + "associated_networks_value2", + ], + "associations": [{"name": "name_value", "network": "network_value"}], + "satisfies_pzs": True, + "satisfies_pzi": True, + "billing_project_id": "billing_project_id_value", + "endpoint_settings": {"jumbo_frames_enabled": True}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firewall_activation.CreateFirewallEndpointRequest.meta.fields[ + "firewall_endpoint" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
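+        # The branch below only decides how to introspect the field's message
+        # type: proto-plus types (no DESCRIPTOR attribute) are walked via
+        # ``.meta.fields``, raw protobuf ``*_pb2`` types via ``DESCRIPTOR.fields``.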
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["firewall_endpoint"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_endpoint"][field])): + del request_init["firewall_endpoint"][field][i][subfield] + else: + del request_init["firewall_endpoint"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_firewall_endpoint(request) + + # Establish that the response is the type that we expect. 
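+    # For long-running methods the mocked Operation is only round-tripped
+    # through JSON here; no further assertions are made on the returned future.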
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_firewall_endpoint_rest_interceptors(null_interceptor): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirewallActivationRestInterceptor(), + ) + client = FirewallActivationClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirewallActivationRestInterceptor, "post_create_firewall_endpoint" + ) as post, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_create_firewall_endpoint_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.FirewallActivationRestInterceptor, "pre_create_firewall_endpoint" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firewall_activation.CreateFirewallEndpointRequest.pb( + firewall_activation.CreateFirewallEndpointRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = firewall_activation.CreateFirewallEndpointRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_firewall_endpoint( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_firewall_endpoint_rest_bad_request( + request_type=firewall_activation.DeleteFirewallEndpointRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/firewallEndpoints/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_firewall_endpoint(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.DeleteFirewallEndpointRequest, + dict, + ], +) +def test_delete_firewall_endpoint_rest_call_success(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/firewallEndpoints/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_firewall_endpoint(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_firewall_endpoint_rest_interceptors(null_interceptor): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirewallActivationRestInterceptor(), + ) + client = FirewallActivationClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirewallActivationRestInterceptor, "post_delete_firewall_endpoint" + ) as post, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_delete_firewall_endpoint_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.FirewallActivationRestInterceptor, "pre_delete_firewall_endpoint" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firewall_activation.DeleteFirewallEndpointRequest.pb( + firewall_activation.DeleteFirewallEndpointRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = firewall_activation.DeleteFirewallEndpointRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata 
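+        # The mocked hooks stand in for a user-provided interceptor: ``pre``
+        # supplies the request/metadata actually sent, while ``post`` and
+        # ``post_with_metadata`` supply the response handed back to the caller.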
+ post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_firewall_endpoint( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_firewall_endpoint_rest_bad_request( + request_type=firewall_activation.UpdateFirewallEndpointRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "firewall_endpoint": { + "name": "organizations/sample1/locations/sample2/firewallEndpoints/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_firewall_endpoint(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.UpdateFirewallEndpointRequest, + dict, + ], +) +def test_update_firewall_endpoint_rest_call_success(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "firewall_endpoint": { + "name": "organizations/sample1/locations/sample2/firewallEndpoints/sample3" + } + } + request_init["firewall_endpoint"] = { + "name": "organizations/sample1/locations/sample2/firewallEndpoints/sample3", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "reconciling": True, + "associated_networks": [ + "associated_networks_value1", + "associated_networks_value2", + ], + "associations": [{"name": "name_value", "network": "network_value"}], + "satisfies_pzs": True, + "satisfies_pzi": True, + "billing_project_id": "billing_project_id_value", + "endpoint_settings": {"jumbo_frames_enabled": True}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = firewall_activation.UpdateFirewallEndpointRequest.meta.fields[ + "firewall_endpoint" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["firewall_endpoint"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["firewall_endpoint"][field])): + del request_init["firewall_endpoint"][field][i][subfield] + else: + del request_init["firewall_endpoint"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_firewall_endpoint(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_firewall_endpoint_rest_interceptors(null_interceptor): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirewallActivationRestInterceptor(), + ) + client = FirewallActivationClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirewallActivationRestInterceptor, "post_update_firewall_endpoint" + ) as post, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_update_firewall_endpoint_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.FirewallActivationRestInterceptor, "pre_update_firewall_endpoint" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firewall_activation.UpdateFirewallEndpointRequest.pb( + firewall_activation.UpdateFirewallEndpointRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = firewall_activation.UpdateFirewallEndpointRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_firewall_endpoint( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_firewall_endpoint_associations_rest_bad_request( + request_type=firewall_activation.ListFirewallEndpointAssociationsRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_firewall_endpoint_associations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.ListFirewallEndpointAssociationsRequest, + dict, + ], +) +def test_list_firewall_endpoint_associations_rest_call_success(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firewall_activation.ListFirewallEndpointAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firewall_activation.ListFirewallEndpointAssociationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_firewall_endpoint_associations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListFirewallEndpointAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_firewall_endpoint_associations_rest_interceptors(null_interceptor): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirewallActivationRestInterceptor(), + ) + client = FirewallActivationClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_list_firewall_endpoint_associations", + ) as post, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_list_firewall_endpoint_associations_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "pre_list_firewall_endpoint_associations", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firewall_activation.ListFirewallEndpointAssociationsRequest.pb( + firewall_activation.ListFirewallEndpointAssociationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = ( + firewall_activation.ListFirewallEndpointAssociationsResponse.to_json( + firewall_activation.ListFirewallEndpointAssociationsResponse() + ) + ) + req.return_value.content = return_value + + request = firewall_activation.ListFirewallEndpointAssociationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + firewall_activation.ListFirewallEndpointAssociationsResponse() + ) + post_with_metadata.return_value = ( + firewall_activation.ListFirewallEndpointAssociationsResponse(), + metadata, + ) + + client.list_firewall_endpoint_associations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_firewall_endpoint_association_rest_bad_request( + request_type=firewall_activation.GetFirewallEndpointAssociationRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/firewallEndpointAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_firewall_endpoint_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.GetFirewallEndpointAssociationRequest, + dict, + ], +) +def test_get_firewall_endpoint_association_rest_call_success(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/firewallEndpointAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firewall_activation.FirewallEndpointAssociation( + name="name_value", + state=firewall_activation.FirewallEndpointAssociation.State.CREATING, + network="network_value", + firewall_endpoint="firewall_endpoint_value", + tls_inspection_policy="tls_inspection_policy_value", + reconciling=True, + disabled=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firewall_activation.FirewallEndpointAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_firewall_endpoint_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, firewall_activation.FirewallEndpointAssociation) + assert response.name == "name_value" + assert ( + response.state == firewall_activation.FirewallEndpointAssociation.State.CREATING + ) + assert response.network == "network_value" + assert response.firewall_endpoint == "firewall_endpoint_value" + assert response.tls_inspection_policy == "tls_inspection_policy_value" + assert response.reconciling is True + assert response.disabled is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_firewall_endpoint_association_rest_interceptors(null_interceptor): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirewallActivationRestInterceptor(), + ) + client = FirewallActivationClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_get_firewall_endpoint_association", + ) as post, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_get_firewall_endpoint_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "pre_get_firewall_endpoint_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firewall_activation.GetFirewallEndpointAssociationRequest.pb( + firewall_activation.GetFirewallEndpointAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firewall_activation.FirewallEndpointAssociation.to_json( + firewall_activation.FirewallEndpointAssociation() + ) + req.return_value.content = return_value + + request = firewall_activation.GetFirewallEndpointAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firewall_activation.FirewallEndpointAssociation() + post_with_metadata.return_value = ( + firewall_activation.FirewallEndpointAssociation(), + metadata, + ) + + client.get_firewall_endpoint_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_firewall_endpoint_association_rest_bad_request( + request_type=firewall_activation.CreateFirewallEndpointAssociationRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_firewall_endpoint_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.CreateFirewallEndpointAssociationRequest, + dict, + ], +) +def test_create_firewall_endpoint_association_rest_call_success(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["firewall_endpoint_association"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "network": "network_value", + "firewall_endpoint": "firewall_endpoint_value", + "tls_inspection_policy": "tls_inspection_policy_value", + "reconciling": True, + "disabled": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + firewall_activation.CreateFirewallEndpointAssociationRequest.meta.fields[ + "firewall_endpoint_association" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_endpoint_association" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_endpoint_association"][field]) + ): + del request_init["firewall_endpoint_association"][field][i][ + subfield + ] + else: + del request_init["firewall_endpoint_association"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_firewall_endpoint_association(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_firewall_endpoint_association_rest_interceptors(null_interceptor): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirewallActivationRestInterceptor(), + ) + client = FirewallActivationClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_create_firewall_endpoint_association", + ) as post, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_create_firewall_endpoint_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "pre_create_firewall_endpoint_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firewall_activation.CreateFirewallEndpointAssociationRequest.pb( + firewall_activation.CreateFirewallEndpointAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = firewall_activation.CreateFirewallEndpointAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_firewall_endpoint_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_firewall_endpoint_association_rest_bad_request( + request_type=firewall_activation.DeleteFirewallEndpointAssociationRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/firewallEndpointAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_firewall_endpoint_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.DeleteFirewallEndpointAssociationRequest, + dict, + ], +) +def test_delete_firewall_endpoint_association_rest_call_success(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/firewallEndpointAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_firewall_endpoint_association(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_firewall_endpoint_association_rest_interceptors(null_interceptor): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirewallActivationRestInterceptor(), + ) + client = FirewallActivationClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_delete_firewall_endpoint_association", + ) as post, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_delete_firewall_endpoint_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "pre_delete_firewall_endpoint_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firewall_activation.DeleteFirewallEndpointAssociationRequest.pb( + firewall_activation.DeleteFirewallEndpointAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = 
firewall_activation.DeleteFirewallEndpointAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_firewall_endpoint_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_firewall_endpoint_association_rest_bad_request( + request_type=firewall_activation.UpdateFirewallEndpointAssociationRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "firewall_endpoint_association": { + "name": "projects/sample1/locations/sample2/firewallEndpointAssociations/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_firewall_endpoint_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firewall_activation.UpdateFirewallEndpointAssociationRequest, + dict, + ], +) +def test_update_firewall_endpoint_association_rest_call_success(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "firewall_endpoint_association": { + "name": "projects/sample1/locations/sample2/firewallEndpointAssociations/sample3" + } + } + request_init["firewall_endpoint_association"] = { + "name": "projects/sample1/locations/sample2/firewallEndpointAssociations/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "network": "network_value", + "firewall_endpoint": "firewall_endpoint_value", + "tls_inspection_policy": "tls_inspection_policy_value", + "reconciling": True, + "disabled": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + firewall_activation.UpdateFirewallEndpointAssociationRequest.meta.fields[ + "firewall_endpoint_association" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "firewall_endpoint_association" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["firewall_endpoint_association"][field]) + ): + del request_init["firewall_endpoint_association"][field][i][ + subfield + ] + else: + del request_init["firewall_endpoint_association"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_firewall_endpoint_association(request) + + # Establish that the response is the type that we expect. 
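+    # (update_firewall_endpoint_association likewise returns a long-running
+    # operation; only the mocked Operation payload is round-tripped below.)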
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_firewall_endpoint_association_rest_interceptors(null_interceptor): + transport = transports.FirewallActivationRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FirewallActivationRestInterceptor(), + ) + client = FirewallActivationClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_update_firewall_endpoint_association", + ) as post, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "post_update_firewall_endpoint_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.FirewallActivationRestInterceptor, + "pre_update_firewall_endpoint_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firewall_activation.UpdateFirewallEndpointAssociationRequest.pb( + firewall_activation.UpdateFirewallEndpointAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = firewall_activation.UpdateFirewallEndpointAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_firewall_endpoint_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_firewall_endpoints_empty_call_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoints), "__call__" + ) as call: + client.list_firewall_endpoints(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.ListFirewallEndpointsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_firewall_endpoint_empty_call_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_firewall_endpoint), "__call__" + ) as call: + client.get_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.GetFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_firewall_endpoint_empty_call_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint), "__call__" + ) as call: + client.create_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.CreateFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_firewall_endpoint_empty_call_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint), "__call__" + ) as call: + client.delete_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.DeleteFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_firewall_endpoint_empty_call_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint), "__call__" + ) as call: + client.update_firewall_endpoint(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.UpdateFirewallEndpointRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_firewall_endpoint_associations_empty_call_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_firewall_endpoint_associations), "__call__" + ) as call: + client.list_firewall_endpoint_associations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.ListFirewallEndpointAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_firewall_endpoint_association_empty_call_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_firewall_endpoint_association), "__call__" + ) as call: + client.get_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.GetFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_firewall_endpoint_association_empty_call_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_firewall_endpoint_association), "__call__" + ) as call: + client.create_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.CreateFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_firewall_endpoint_association_empty_call_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_firewall_endpoint_association), "__call__" + ) as call: + client.delete_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.DeleteFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_firewall_endpoint_association_empty_call_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_firewall_endpoint_association), "__call__" + ) as call: + client.update_firewall_endpoint_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firewall_activation.UpdateFirewallEndpointAssociationRequest() + + assert args[0] == request_msg + + +def test_firewall_activation_rest_lro_client(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.FirewallActivationGrpcTransport, + ) + + +def test_firewall_activation_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.FirewallActivationTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_firewall_activation_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.network_security_v1alpha1.services.firewall_activation.transports.FirewallActivationTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.FirewallActivationTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_firewall_endpoints", + "get_firewall_endpoint", + "create_firewall_endpoint", + "delete_firewall_endpoint", + "update_firewall_endpoint", + "list_firewall_endpoint_associations", + "get_firewall_endpoint_association", + "create_firewall_endpoint_association", + "delete_firewall_endpoint_association", + "update_firewall_endpoint_association", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_firewall_activation_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.network_security_v1alpha1.services.firewall_activation.transports.FirewallActivationTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirewallActivationTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_firewall_activation_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.network_security_v1alpha1.services.firewall_activation.transports.FirewallActivationTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirewallActivationTransport() + adc.assert_called_once() + + +def test_firewall_activation_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + FirewallActivationClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirewallActivationGrpcTransport, + transports.FirewallActivationGrpcAsyncIOTransport, + ], +) +def test_firewall_activation_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirewallActivationGrpcTransport, + transports.FirewallActivationGrpcAsyncIOTransport, + transports.FirewallActivationRestTransport, + ], +) +def test_firewall_activation_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FirewallActivationGrpcTransport, grpc_helpers), + (transports.FirewallActivationGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_firewall_activation_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirewallActivationGrpcTransport, + transports.FirewallActivationGrpcAsyncIOTransport, + ], +) +def test_firewall_activation_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_firewall_activation_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.FirewallActivationRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_firewall_activation_host_no_port(transport_name): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_firewall_activation_host_with_port(transport_name): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com:8000" 
+ ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_firewall_activation_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = FirewallActivationClient( + credentials=creds1, + transport=transport_name, + ) + client2 = FirewallActivationClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_firewall_endpoints._session + session2 = client2.transport.list_firewall_endpoints._session + assert session1 != session2 + session1 = client1.transport.get_firewall_endpoint._session + session2 = client2.transport.get_firewall_endpoint._session + assert session1 != session2 + session1 = client1.transport.create_firewall_endpoint._session + session2 = client2.transport.create_firewall_endpoint._session + assert session1 != session2 + session1 = client1.transport.delete_firewall_endpoint._session + session2 = client2.transport.delete_firewall_endpoint._session + assert session1 != session2 + session1 = client1.transport.update_firewall_endpoint._session + session2 = client2.transport.update_firewall_endpoint._session + assert session1 != session2 + session1 = client1.transport.list_firewall_endpoint_associations._session + session2 = client2.transport.list_firewall_endpoint_associations._session + assert session1 != session2 + session1 = client1.transport.get_firewall_endpoint_association._session + session2 = client2.transport.get_firewall_endpoint_association._session + assert session1 != session2 + session1 = client1.transport.create_firewall_endpoint_association._session + session2 = client2.transport.create_firewall_endpoint_association._session + assert session1 != session2 + session1 = client1.transport.delete_firewall_endpoint_association._session + session2 = client2.transport.delete_firewall_endpoint_association._session + assert session1 != session2 + session1 = client1.transport.update_firewall_endpoint_association._session + session2 = client2.transport.update_firewall_endpoint_association._session + assert session1 != session2 + + +def test_firewall_activation_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.FirewallActivationGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_firewall_activation_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.FirewallActivationGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.FirewallActivationGrpcTransport,
+        transports.FirewallActivationGrpcAsyncIOTransport,
+    ],
+)
+def test_firewall_activation_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.FirewallActivationGrpcTransport,
+        transports.FirewallActivationGrpcAsyncIOTransport,
+    ],
+)
+def test_firewall_activation_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_firewall_activation_grpc_lro_client():
+    client = FirewallActivationClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_firewall_activation_grpc_lro_async_client():
+    client = FirewallActivationAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_firewall_endpoint_path():
+    organization = "squid"
+    location = "clam"
+    firewall_endpoint = "whelk"
+    expected = "organizations/{organization}/locations/{location}/firewallEndpoints/{firewall_endpoint}".format(
+        organization=organization,
+        location=location,
+        firewall_endpoint=firewall_endpoint,
+    )
+    actual = FirewallActivationClient.firewall_endpoint_path(
+        organization, location, firewall_endpoint
+    )
+    assert expected == actual
+
+
+def test_parse_firewall_endpoint_path():
+    expected = {
+        "organization": "octopus",
+        "location": "oyster",
+        "firewall_endpoint": "nudibranch",
+    }
+    path = FirewallActivationClient.firewall_endpoint_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = FirewallActivationClient.parse_firewall_endpoint_path(path)
+    assert expected == actual
+
+
+def test_firewall_endpoint_association_path():
+    project = "cuttlefish"
+    location = "mussel"
+    firewall_endpoint_association = "winkle"
+    expected = "projects/{project}/locations/{location}/firewallEndpointAssociations/{firewall_endpoint_association}".format(
+        project=project,
+        location=location,
+        firewall_endpoint_association=firewall_endpoint_association,
+    )
+    actual = FirewallActivationClient.firewall_endpoint_association_path(
+        project, location, firewall_endpoint_association
+    )
+    assert expected == actual
+
+
+def test_parse_firewall_endpoint_association_path():
+    expected = {
+        "project": "nautilus",
+        "location": "scallop",
+        "firewall_endpoint_association": "abalone",
+    }
+    path = FirewallActivationClient.firewall_endpoint_association_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = FirewallActivationClient.parse_firewall_endpoint_association_path(path)
+    assert expected == actual
+
+
+def test_network_path():
+    project = "squid"
+    network = "clam"
+    expected = "projects/{project}/global/networks/{network}".format(
+        project=project,
+        network=network,
+    )
+    actual = FirewallActivationClient.network_path(project, network)
+    assert expected == actual
+
+
+def test_parse_network_path():
+    expected = {
+        "project": "whelk",
+        "network": "octopus",
+    }
+    path = FirewallActivationClient.network_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = FirewallActivationClient.parse_network_path(path) + assert expected == actual + + +def test_tls_inspection_policy_path(): + project = "oyster" + location = "nudibranch" + tls_inspection_policy = "cuttlefish" + expected = "projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}".format( + project=project, + location=location, + tls_inspection_policy=tls_inspection_policy, + ) + actual = FirewallActivationClient.tls_inspection_policy_path( + project, location, tls_inspection_policy + ) + assert expected == actual + + +def test_parse_tls_inspection_policy_path(): + expected = { + "project": "mussel", + "location": "winkle", + "tls_inspection_policy": "nautilus", + } + path = FirewallActivationClient.tls_inspection_policy_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallActivationClient.parse_tls_inspection_policy_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = FirewallActivationClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = FirewallActivationClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallActivationClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = FirewallActivationClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = FirewallActivationClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallActivationClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = FirewallActivationClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = FirewallActivationClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallActivationClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = FirewallActivationClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = FirewallActivationClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirewallActivationClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = FirewallActivationClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = FirewallActivationClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallActivationClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.FirewallActivationTransport, "_prep_wrapped_messages" + ) as prep: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.FirewallActivationTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = FirewallActivationClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_operation_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.Location)
+
+
+def test_get_location_field_headers():
+    client = FirewallActivationClient(credentials=ga_credentials.AnonymousCredentials())
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = FirewallActivationAsyncClient(credentials=async_anonymous_credentials())
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = FirewallActivationClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # Patch get_location (not list_locations) so the mocked stub matches the call below.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = FirewallActivationAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # Patch get_location (not list_locations) so the mocked stub matches the call below.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = FirewallActivationClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = FirewallActivationAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = FirewallActivationClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = FirewallActivationAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = FirewallActivationClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (FirewallActivationClient, transports.FirewallActivationGrpcTransport), + ( + FirewallActivationAsyncClient, + transports.FirewallActivationGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_intercept.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_intercept.py new file mode 100644 index 000000000000..c9557164084c --- /dev/null +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_intercept.py @@ -0,0 +1,21193 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.intercept import ( + InterceptAsyncClient, + InterceptClient, + pagers, + transports, +) +from google.cloud.network_security_v1alpha1.types import common, intercept + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InterceptClient._get_default_mtls_endpoint(None) is None + assert InterceptClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + InterceptClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + InterceptClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + InterceptClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert InterceptClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert InterceptClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert InterceptClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert InterceptClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + InterceptClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert InterceptClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert InterceptClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert InterceptClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + InterceptClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert InterceptClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert InterceptClient._get_client_cert_source(None, False) is None + assert ( + InterceptClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + 
InterceptClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + InterceptClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + InterceptClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + InterceptClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InterceptClient), +) +@mock.patch.object( + InterceptAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InterceptAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = InterceptClient._DEFAULT_UNIVERSE + default_endpoint = InterceptClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = InterceptClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + InterceptClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + InterceptClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == InterceptClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InterceptClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + InterceptClient._get_api_endpoint(None, None, default_universe, "always") + == InterceptClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InterceptClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == InterceptClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InterceptClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + InterceptClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + InterceptClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + InterceptClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + InterceptClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + InterceptClient._get_universe_domain(None, None) + == InterceptClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + InterceptClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
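+
+
+# NOTE: illustrative sketch added by the editor, not emitted by the GAPIC
+# generator. It restates, from a caller's point of view, the endpoint and
+# universe-domain resolution that the private helpers above encode, using
+# only names already imported in this module (InterceptClient,
+# client_options, ga_credentials, os, mock). The assertions mirror
+# test_intercept_client_client_api_endpoint further below; the universe
+# domain "example-universe.com" is a made-up placeholder.
+def test_intercept_client_endpoint_resolution_sketch():
+    # Default construction targets the Google default universe.
+    client = InterceptClient(credentials=ga_credentials.AnonymousCredentials())
+    assert client.universe_domain == "googleapis.com"
+
+    # GOOGLE_API_USE_MTLS_ENDPOINT=always switches the client to the
+    # default mTLS endpoint, even without a client certificate source.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        client = InterceptClient(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == InterceptClient.DEFAULT_MTLS_ENDPOINT
+
+    # When the installed google-api-core supports ClientOptions.universe_domain,
+    # an explicit universe domain routes the client to that universe.
+    options = client_options.ClientOptions()
+    if hasattr(options, "universe_domain"):
+        options = client_options.ClientOptions(
+            universe_domain="example-universe.com"
+        )
+        client = InterceptClient(
+            client_options=options,
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+        assert client.universe_domain == "example-universe.com"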
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InterceptClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InterceptClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (InterceptClient, "grpc"), + (InterceptAsyncClient, "grpc_asyncio"), + (InterceptClient, "rest"), + ], +) +def test_intercept_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.InterceptGrpcTransport, "grpc"), + (transports.InterceptGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.InterceptRestTransport, "rest"), + ], +) +def test_intercept_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (InterceptClient, "grpc"), + (InterceptAsyncClient, "grpc_asyncio"), + (InterceptClient, "rest"), + ], +) +def test_intercept_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", 
transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +def test_intercept_client_get_transport_class(): + transport = InterceptClient.get_transport_class() + available_transports = [ + transports.InterceptGrpcTransport, + transports.InterceptRestTransport, + ] + assert transport in available_transports + + transport = InterceptClient.get_transport_class("grpc") + assert transport == transports.InterceptGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (InterceptClient, transports.InterceptGrpcTransport, "grpc"), + ( + InterceptAsyncClient, + transports.InterceptGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (InterceptClient, transports.InterceptRestTransport, "rest"), + ], +) +@mock.patch.object( + InterceptClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InterceptClient), +) +@mock.patch.object( + InterceptAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InterceptAsyncClient), +) +def test_intercept_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InterceptClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InterceptClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (InterceptClient, transports.InterceptGrpcTransport, "grpc", "true"), + ( + InterceptAsyncClient, + transports.InterceptGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (InterceptClient, transports.InterceptGrpcTransport, "grpc", "false"), + ( + InterceptAsyncClient, + transports.InterceptGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (InterceptClient, transports.InterceptRestTransport, "rest", "true"), + (InterceptClient, transports.InterceptRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + InterceptClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InterceptClient), +) +@mock.patch.object( + InterceptAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(InterceptAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_intercept_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [InterceptClient, InterceptAsyncClient]) +@mock.patch.object( + InterceptClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterceptClient) +) +@mock.patch.object( + InterceptAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(InterceptAsyncClient), +) +def test_intercept_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [InterceptClient, InterceptAsyncClient]) +@mock.patch.object( + InterceptClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InterceptClient), +) +@mock.patch.object( + InterceptAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InterceptAsyncClient), +) +def test_intercept_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = InterceptClient._DEFAULT_UNIVERSE + default_endpoint = InterceptClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = InterceptClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (InterceptClient, transports.InterceptGrpcTransport, "grpc"), + ( + InterceptAsyncClient, + transports.InterceptGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (InterceptClient, transports.InterceptRestTransport, "rest"), + ], +) +def test_intercept_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (InterceptClient, transports.InterceptGrpcTransport, "grpc", grpc_helpers), + ( + InterceptAsyncClient, + transports.InterceptGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (InterceptClient, transports.InterceptRestTransport, "rest", None), + ], +) +def test_intercept_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_intercept_client_client_options_from_dict(): + with mock.patch( + "google.cloud.network_security_v1alpha1.services.intercept.transports.InterceptGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = InterceptClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (InterceptClient, transports.InterceptGrpcTransport, "grpc", grpc_helpers), + ( + InterceptAsyncClient, + transports.InterceptGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_intercept_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.ListInterceptEndpointGroupsRequest, + dict, + ], +) +def test_list_intercept_endpoint_groups(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.ListInterceptEndpointGroupsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_intercept_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.ListInterceptEndpointGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInterceptEndpointGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_intercept_endpoint_groups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.ListInterceptEndpointGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_intercept_endpoint_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.ListInterceptEndpointGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_intercept_endpoint_groups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_intercept_endpoint_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_intercept_endpoint_groups + ] = mock_rpc + request = {} + client.list_intercept_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_intercept_endpoint_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_intercept_endpoint_groups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_intercept_endpoint_groups + ] = mock_rpc + + request = {} + await client.list_intercept_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_intercept_endpoint_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_groups_async( + transport: str = "grpc_asyncio", + request_type=intercept.ListInterceptEndpointGroupsRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptEndpointGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_intercept_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.ListInterceptEndpointGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInterceptEndpointGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_groups_async_from_dict(): + await test_list_intercept_endpoint_groups_async(request_type=dict) + + +def test_list_intercept_endpoint_groups_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.ListInterceptEndpointGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + call.return_value = intercept.ListInterceptEndpointGroupsResponse() + client.list_intercept_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_groups_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.ListInterceptEndpointGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptEndpointGroupsResponse() + ) + await client.list_intercept_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_intercept_endpoint_groups_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = intercept.ListInterceptEndpointGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_intercept_endpoint_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_intercept_endpoint_groups_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_intercept_endpoint_groups( + intercept.ListInterceptEndpointGroupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_groups_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.ListInterceptEndpointGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptEndpointGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_intercept_endpoint_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_groups_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_intercept_endpoint_groups( + intercept.ListInterceptEndpointGroupsRequest(), + parent="parent_value", + ) + + +def test_list_intercept_endpoint_groups_pager(transport_name: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + ], + next_page_token="abc", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[], + next_page_token="def", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_intercept_endpoint_groups( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, intercept.InterceptEndpointGroup) for i in results) + + +def test_list_intercept_endpoint_groups_pages(transport_name: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + ], + next_page_token="abc", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[], + next_page_token="def", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_intercept_endpoint_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_groups_async_pager(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + ], + next_page_token="abc", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[], + next_page_token="def", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_intercept_endpoint_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, intercept.InterceptEndpointGroup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_groups_async_pages(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + ], + next_page_token="abc", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[], + next_page_token="def", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_intercept_endpoint_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.GetInterceptEndpointGroupRequest, + dict, + ], +) +def test_get_intercept_endpoint_group(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = intercept.InterceptEndpointGroup( + name="name_value", + intercept_deployment_group="intercept_deployment_group_value", + state=intercept.InterceptEndpointGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + response = client.get_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.GetInterceptEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, intercept.InterceptEndpointGroup) + assert response.name == "name_value" + assert response.intercept_deployment_group == "intercept_deployment_group_value" + assert response.state == intercept.InterceptEndpointGroup.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +def test_get_intercept_endpoint_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.GetInterceptEndpointGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_intercept_endpoint_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.GetInterceptEndpointGroupRequest( + name="name_value", + ) + + +def test_get_intercept_endpoint_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_intercept_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_intercept_endpoint_group + ] = mock_rpc + request = {} + client.get_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_intercept_endpoint_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_intercept_endpoint_group + ] = mock_rpc + + request = {} + await client.get_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_async( + transport: str = "grpc_asyncio", + request_type=intercept.GetInterceptEndpointGroupRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptEndpointGroup( + name="name_value", + intercept_deployment_group="intercept_deployment_group_value", + state=intercept.InterceptEndpointGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + ) + response = await client.get_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.GetInterceptEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, intercept.InterceptEndpointGroup) + assert response.name == "name_value" + assert response.intercept_deployment_group == "intercept_deployment_group_value" + assert response.state == intercept.InterceptEndpointGroup.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_async_from_dict(): + await test_get_intercept_endpoint_group_async(request_type=dict) + + +def test_get_intercept_endpoint_group_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.GetInterceptEndpointGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = intercept.InterceptEndpointGroup() + client.get_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.GetInterceptEndpointGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptEndpointGroup() + ) + await client.get_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_intercept_endpoint_group_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.InterceptEndpointGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_intercept_endpoint_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_intercept_endpoint_group_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_intercept_endpoint_group( + intercept.GetInterceptEndpointGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.InterceptEndpointGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptEndpointGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_intercept_endpoint_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_intercept_endpoint_group( + intercept.GetInterceptEndpointGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.CreateInterceptEndpointGroupRequest, + dict, + ], +) +def test_create_intercept_endpoint_group(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.CreateInterceptEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_intercept_endpoint_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.CreateInterceptEndpointGroupRequest( + parent="parent_value", + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_intercept_endpoint_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.CreateInterceptEndpointGroupRequest( + parent="parent_value", + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + ) + + +def test_create_intercept_endpoint_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_intercept_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_intercept_endpoint_group + ] = mock_rpc + request = {} + client.create_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_intercept_endpoint_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_intercept_endpoint_group + ] = mock_rpc + + request = {} + await client.create_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_async( + transport: str = "grpc_asyncio", + request_type=intercept.CreateInterceptEndpointGroupRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.CreateInterceptEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_async_from_dict(): + await test_create_intercept_endpoint_group_async(request_type=dict) + + +def test_create_intercept_endpoint_group_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = intercept.CreateInterceptEndpointGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.CreateInterceptEndpointGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_intercept_endpoint_group_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_intercept_endpoint_group( + parent="parent_value", + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].intercept_endpoint_group + mock_val = intercept.InterceptEndpointGroup(name="name_value") + assert arg == mock_val + arg = args[0].intercept_endpoint_group_id + mock_val = "intercept_endpoint_group_id_value" + assert arg == mock_val + + +def test_create_intercept_endpoint_group_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_intercept_endpoint_group( + intercept.CreateInterceptEndpointGroupRequest(), + parent="parent_value", + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_intercept_endpoint_group( + parent="parent_value", + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].intercept_endpoint_group + mock_val = intercept.InterceptEndpointGroup(name="name_value") + assert arg == mock_val + arg = args[0].intercept_endpoint_group_id + mock_val = "intercept_endpoint_group_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_intercept_endpoint_group( + intercept.CreateInterceptEndpointGroupRequest(), + parent="parent_value", + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.UpdateInterceptEndpointGroupRequest, + dict, + ], +) +def test_update_intercept_endpoint_group(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.UpdateInterceptEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_update_intercept_endpoint_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.UpdateInterceptEndpointGroupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_intercept_endpoint_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.UpdateInterceptEndpointGroupRequest() + + +def test_update_intercept_endpoint_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_intercept_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_intercept_endpoint_group + ] = mock_rpc + request = {} + client.update_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_intercept_endpoint_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_intercept_endpoint_group + ] = mock_rpc + + request = {} + await client.update_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_async( + transport: str = "grpc_asyncio", + request_type=intercept.UpdateInterceptEndpointGroupRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.UpdateInterceptEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_async_from_dict(): + await test_update_intercept_endpoint_group_async(request_type=dict) + + +def test_update_intercept_endpoint_group_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = intercept.UpdateInterceptEndpointGroupRequest() + + request.intercept_endpoint_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "intercept_endpoint_group.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.UpdateInterceptEndpointGroupRequest() + + request.intercept_endpoint_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "intercept_endpoint_group.name=name_value", + ) in kw["metadata"] + + +def test_update_intercept_endpoint_group_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_intercept_endpoint_group( + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].intercept_endpoint_group + mock_val = intercept.InterceptEndpointGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_intercept_endpoint_group_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_intercept_endpoint_group( + intercept.UpdateInterceptEndpointGroupRequest(), + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_intercept_endpoint_group( + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].intercept_endpoint_group + mock_val = intercept.InterceptEndpointGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_intercept_endpoint_group( + intercept.UpdateInterceptEndpointGroupRequest(), + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.DeleteInterceptEndpointGroupRequest, + dict, + ], +) +def test_delete_intercept_endpoint_group(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.DeleteInterceptEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_intercept_endpoint_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.DeleteInterceptEndpointGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_intercept_endpoint_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.DeleteInterceptEndpointGroupRequest( + name="name_value", + ) + + +def test_delete_intercept_endpoint_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_intercept_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_intercept_endpoint_group + ] = mock_rpc + request = {} + client.delete_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_intercept_endpoint_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_intercept_endpoint_group + ] = mock_rpc + + request = {} + await client.delete_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_async( + transport: str = "grpc_asyncio", + request_type=intercept.DeleteInterceptEndpointGroupRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.DeleteInterceptEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_async_from_dict(): + await test_delete_intercept_endpoint_group_async(request_type=dict) + + +def test_delete_intercept_endpoint_group_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = intercept.DeleteInterceptEndpointGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.DeleteInterceptEndpointGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_intercept_endpoint_group_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_intercept_endpoint_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_intercept_endpoint_group_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_intercept_endpoint_group( + intercept.DeleteInterceptEndpointGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_intercept_endpoint_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_intercept_endpoint_group( + intercept.DeleteInterceptEndpointGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.ListInterceptEndpointGroupAssociationsRequest, + dict, + ], +) +def test_list_intercept_endpoint_group_associations( + request_type, transport: str = "grpc" +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.ListInterceptEndpointGroupAssociationsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_intercept_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.ListInterceptEndpointGroupAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInterceptEndpointGroupAssociationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_intercept_endpoint_group_associations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.ListInterceptEndpointGroupAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_intercept_endpoint_group_associations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.ListInterceptEndpointGroupAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_intercept_endpoint_group_associations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_intercept_endpoint_group_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_intercept_endpoint_group_associations + ] = mock_rpc + request = {} + client.list_intercept_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_intercept_endpoint_group_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_group_associations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_intercept_endpoint_group_associations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_intercept_endpoint_group_associations + ] = mock_rpc + + request = {} + await client.list_intercept_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_intercept_endpoint_group_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_group_associations_async( + transport: str = "grpc_asyncio", + request_type=intercept.ListInterceptEndpointGroupAssociationsRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptEndpointGroupAssociationsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_intercept_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.ListInterceptEndpointGroupAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInterceptEndpointGroupAssociationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_group_associations_async_from_dict(): + await test_list_intercept_endpoint_group_associations_async(request_type=dict) + + +def test_list_intercept_endpoint_group_associations_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.ListInterceptEndpointGroupAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + call.return_value = intercept.ListInterceptEndpointGroupAssociationsResponse() + client.list_intercept_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_group_associations_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.ListInterceptEndpointGroupAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptEndpointGroupAssociationsResponse() + ) + await client.list_intercept_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_intercept_endpoint_group_associations_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.ListInterceptEndpointGroupAssociationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_intercept_endpoint_group_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_intercept_endpoint_group_associations_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_intercept_endpoint_group_associations( + intercept.ListInterceptEndpointGroupAssociationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_group_associations_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.ListInterceptEndpointGroupAssociationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptEndpointGroupAssociationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_intercept_endpoint_group_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_group_associations_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_intercept_endpoint_group_associations( + intercept.ListInterceptEndpointGroupAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_intercept_endpoint_group_associations_pager(transport_name: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + ], + next_page_token="abc", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[], + next_page_token="def", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_intercept_endpoint_group_associations( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, intercept.InterceptEndpointGroupAssociation) for i in results + ) + + +def test_list_intercept_endpoint_group_associations_pages(transport_name: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + ], + next_page_token="abc", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[], + next_page_token="def", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + ], + ), + RuntimeError, + ) + pages = list( + client.list_intercept_endpoint_group_associations(request={}).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_group_associations_async_pager(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + ], + next_page_token="abc", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[], + next_page_token="def", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_intercept_endpoint_group_associations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, intercept.InterceptEndpointGroupAssociation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_intercept_endpoint_group_associations_async_pages(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + ], + next_page_token="abc", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[], + next_page_token="def", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_intercept_endpoint_group_associations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.GetInterceptEndpointGroupAssociationRequest, + dict, + ], +) +def test_get_intercept_endpoint_group_association( + request_type, transport: str = "grpc" +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.InterceptEndpointGroupAssociation( + name="name_value", + intercept_endpoint_group="intercept_endpoint_group_value", + network="network_value", + state=intercept.InterceptEndpointGroupAssociation.State.ACTIVE, + reconciling=True, + ) + response = client.get_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.GetInterceptEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, intercept.InterceptEndpointGroupAssociation) + assert response.name == "name_value" + assert response.intercept_endpoint_group == "intercept_endpoint_group_value" + assert response.network == "network_value" + assert response.state == intercept.InterceptEndpointGroupAssociation.State.ACTIVE + assert response.reconciling is True + + +def test_get_intercept_endpoint_group_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.GetInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_intercept_endpoint_group_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.GetInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + +def test_get_intercept_endpoint_group_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_intercept_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_intercept_endpoint_group_association + ] = mock_rpc + request = {} + client.get_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_intercept_endpoint_group_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_intercept_endpoint_group_association + ] = mock_rpc + + request = {} + await client.get_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_association_async( + transport: str = "grpc_asyncio", + request_type=intercept.GetInterceptEndpointGroupAssociationRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptEndpointGroupAssociation( + name="name_value", + intercept_endpoint_group="intercept_endpoint_group_value", + network="network_value", + state=intercept.InterceptEndpointGroupAssociation.State.ACTIVE, + reconciling=True, + ) + ) + response = await client.get_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.GetInterceptEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, intercept.InterceptEndpointGroupAssociation) + assert response.name == "name_value" + assert response.intercept_endpoint_group == "intercept_endpoint_group_value" + assert response.network == "network_value" + assert response.state == intercept.InterceptEndpointGroupAssociation.State.ACTIVE + assert response.reconciling is True + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_association_async_from_dict(): + await test_get_intercept_endpoint_group_association_async(request_type=dict) + + +def test_get_intercept_endpoint_group_association_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.GetInterceptEndpointGroupAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = intercept.InterceptEndpointGroupAssociation() + client.get_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_association_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.GetInterceptEndpointGroupAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptEndpointGroupAssociation() + ) + await client.get_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_intercept_endpoint_group_association_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.InterceptEndpointGroupAssociation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_intercept_endpoint_group_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_intercept_endpoint_group_association_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_intercept_endpoint_group_association( + intercept.GetInterceptEndpointGroupAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_association_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.InterceptEndpointGroupAssociation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptEndpointGroupAssociation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_intercept_endpoint_group_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_association_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_intercept_endpoint_group_association( + intercept.GetInterceptEndpointGroupAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.CreateInterceptEndpointGroupAssociationRequest, + dict, + ], +) +def test_create_intercept_endpoint_group_association( + request_type, transport: str = "grpc" +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.CreateInterceptEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
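+    # Note: Create is a long-running operation, so the client is expected to wrap
+    # the raw operations_pb2.Operation in an operation future; the assertion below
+    # checks the Future interface rather than the resource type.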
+ assert isinstance(response, future.Future) + + +def test_create_intercept_endpoint_group_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.CreateInterceptEndpointGroupAssociationRequest( + parent="parent_value", + intercept_endpoint_group_association_id="intercept_endpoint_group_association_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_intercept_endpoint_group_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.CreateInterceptEndpointGroupAssociationRequest( + parent="parent_value", + intercept_endpoint_group_association_id="intercept_endpoint_group_association_id_value", + ) + + +def test_create_intercept_endpoint_group_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_intercept_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_intercept_endpoint_group_association + ] = mock_rpc + request = {} + client.create_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_intercept_endpoint_group_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_intercept_endpoint_group_association + ] = mock_rpc + + request = {} + await client.create_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_association_async( + transport: str = "grpc_asyncio", + request_type=intercept.CreateInterceptEndpointGroupAssociationRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.CreateInterceptEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_association_async_from_dict(): + await test_create_intercept_endpoint_group_association_async(request_type=dict) + + +def test_create_intercept_endpoint_group_association_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.CreateInterceptEndpointGroupAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_association_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.CreateInterceptEndpointGroupAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_intercept_endpoint_group_association_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_intercept_endpoint_group_association( + parent="parent_value", + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + intercept_endpoint_group_association_id="intercept_endpoint_group_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].intercept_endpoint_group_association + mock_val = intercept.InterceptEndpointGroupAssociation(name="name_value") + assert arg == mock_val + arg = args[0].intercept_endpoint_group_association_id + mock_val = "intercept_endpoint_group_association_id_value" + assert arg == mock_val + + +def test_create_intercept_endpoint_group_association_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_intercept_endpoint_group_association( + intercept.CreateInterceptEndpointGroupAssociationRequest(), + parent="parent_value", + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + intercept_endpoint_group_association_id="intercept_endpoint_group_association_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_association_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_intercept_endpoint_group_association( + parent="parent_value", + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + intercept_endpoint_group_association_id="intercept_endpoint_group_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].intercept_endpoint_group_association + mock_val = intercept.InterceptEndpointGroupAssociation(name="name_value") + assert arg == mock_val + arg = args[0].intercept_endpoint_group_association_id + mock_val = "intercept_endpoint_group_association_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_association_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_intercept_endpoint_group_association( + intercept.CreateInterceptEndpointGroupAssociationRequest(), + parent="parent_value", + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + intercept_endpoint_group_association_id="intercept_endpoint_group_association_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.UpdateInterceptEndpointGroupAssociationRequest, + dict, + ], +) +def test_update_intercept_endpoint_group_association( + request_type, transport: str = "grpc" +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.UpdateInterceptEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_intercept_endpoint_group_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.UpdateInterceptEndpointGroupAssociationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_intercept_endpoint_group_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.UpdateInterceptEndpointGroupAssociationRequest() + + +def test_update_intercept_endpoint_group_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_intercept_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_intercept_endpoint_group_association + ] = mock_rpc + request = {} + client.update_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_intercept_endpoint_group_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_intercept_endpoint_group_association + ] = mock_rpc + + request = {} + await client.update_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_association_async( + transport: str = "grpc_asyncio", + request_type=intercept.UpdateInterceptEndpointGroupAssociationRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.UpdateInterceptEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_association_async_from_dict(): + await test_update_intercept_endpoint_group_association_async(request_type=dict) + + +def test_update_intercept_endpoint_group_association_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.UpdateInterceptEndpointGroupAssociationRequest() + + request.intercept_endpoint_group_association.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "intercept_endpoint_group_association.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_association_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.UpdateInterceptEndpointGroupAssociationRequest() + + request.intercept_endpoint_group_association.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "intercept_endpoint_group_association.name=name_value", + ) in kw["metadata"] + + +def test_update_intercept_endpoint_group_association_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_intercept_endpoint_group_association( + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].intercept_endpoint_group_association + mock_val = intercept.InterceptEndpointGroupAssociation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_intercept_endpoint_group_association_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_intercept_endpoint_group_association( + intercept.UpdateInterceptEndpointGroupAssociationRequest(), + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_association_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_intercept_endpoint_group_association( + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].intercept_endpoint_group_association + mock_val = intercept.InterceptEndpointGroupAssociation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_association_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_intercept_endpoint_group_association( + intercept.UpdateInterceptEndpointGroupAssociationRequest(), + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.DeleteInterceptEndpointGroupAssociationRequest, + dict, + ], +) +def test_delete_intercept_endpoint_group_association( + request_type, transport: str = "grpc" +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.DeleteInterceptEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_intercept_endpoint_group_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.DeleteInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_intercept_endpoint_group_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.DeleteInterceptEndpointGroupAssociationRequest( + name="name_value", + ) + + +def test_delete_intercept_endpoint_group_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_intercept_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_intercept_endpoint_group_association + ] = mock_rpc + request = {} + client.delete_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_intercept_endpoint_group_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_intercept_endpoint_group_association + ] = mock_rpc + + request = {} + await client.delete_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_association_async( + transport: str = "grpc_asyncio", + request_type=intercept.DeleteInterceptEndpointGroupAssociationRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.DeleteInterceptEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_association_async_from_dict(): + await test_delete_intercept_endpoint_group_association_async(request_type=dict) + + +def test_delete_intercept_endpoint_group_association_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.DeleteInterceptEndpointGroupAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_association_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.DeleteInterceptEndpointGroupAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_intercept_endpoint_group_association_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_intercept_endpoint_group_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_intercept_endpoint_group_association_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_intercept_endpoint_group_association( + intercept.DeleteInterceptEndpointGroupAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_association_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_intercept_endpoint_group_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_association_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_intercept_endpoint_group_association( + intercept.DeleteInterceptEndpointGroupAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.ListInterceptDeploymentGroupsRequest, + dict, + ], +) +def test_list_intercept_deployment_groups(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.ListInterceptDeploymentGroupsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_intercept_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.ListInterceptDeploymentGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInterceptDeploymentGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_intercept_deployment_groups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.ListInterceptDeploymentGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_intercept_deployment_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.ListInterceptDeploymentGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_intercept_deployment_groups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_intercept_deployment_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_intercept_deployment_groups + ] = mock_rpc + request = {} + client.list_intercept_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_intercept_deployment_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_intercept_deployment_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_intercept_deployment_groups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_intercept_deployment_groups + ] = mock_rpc + + request = {} + await client.list_intercept_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_intercept_deployment_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_intercept_deployment_groups_async( + transport: str = "grpc_asyncio", + request_type=intercept.ListInterceptDeploymentGroupsRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptDeploymentGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_intercept_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.ListInterceptDeploymentGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInterceptDeploymentGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_intercept_deployment_groups_async_from_dict(): + await test_list_intercept_deployment_groups_async(request_type=dict) + + +def test_list_intercept_deployment_groups_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.ListInterceptDeploymentGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + call.return_value = intercept.ListInterceptDeploymentGroupsResponse() + client.list_intercept_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_intercept_deployment_groups_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.ListInterceptDeploymentGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptDeploymentGroupsResponse() + ) + await client.list_intercept_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_intercept_deployment_groups_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = intercept.ListInterceptDeploymentGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_intercept_deployment_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_intercept_deployment_groups_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_intercept_deployment_groups( + intercept.ListInterceptDeploymentGroupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_intercept_deployment_groups_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.ListInterceptDeploymentGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptDeploymentGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_intercept_deployment_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_intercept_deployment_groups_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_intercept_deployment_groups( + intercept.ListInterceptDeploymentGroupsRequest(), + parent="parent_value", + ) + + +def test_list_intercept_deployment_groups_pager(transport_name: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + # Set the response to a series of pages. 
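+        # The trailing RuntimeError in the side_effect sequence acts as a sentinel:
+        # the pager should stop after the final page (empty next_page_token) and
+        # never reach it.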
+ call.side_effect = ( + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + ], + next_page_token="abc", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[], + next_page_token="def", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_intercept_deployment_groups( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, intercept.InterceptDeploymentGroup) for i in results) + + +def test_list_intercept_deployment_groups_pages(transport_name: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + ], + next_page_token="abc", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[], + next_page_token="def", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_intercept_deployment_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_intercept_deployment_groups_async_pager(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
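+ # With new_callable=mock.AsyncMock the same side_effect mechanism applies to
+ # awaited calls: each await on the stub yields the next response in the tuple,
+ # and the async pager below is consumed with `async for`.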
+ call.side_effect = ( + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + ], + next_page_token="abc", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[], + next_page_token="def", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_intercept_deployment_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, intercept.InterceptDeploymentGroup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_intercept_deployment_groups_async_pages(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + ], + next_page_token="abc", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[], + next_page_token="def", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_intercept_deployment_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.GetInterceptDeploymentGroupRequest, + dict, + ], +) +def test_get_intercept_deployment_group(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
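+ # The field values set on the mocked response below are asserted again after
+ # the call, confirming that the client surfaces the proto fields unchanged.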
+ call.return_value = intercept.InterceptDeploymentGroup( + name="name_value", + network="network_value", + state=intercept.InterceptDeploymentGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + response = client.get_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.GetInterceptDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, intercept.InterceptDeploymentGroup) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.state == intercept.InterceptDeploymentGroup.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +def test_get_intercept_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.GetInterceptDeploymentGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_intercept_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.GetInterceptDeploymentGroupRequest( + name="name_value", + ) + + +def test_get_intercept_deployment_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_intercept_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_intercept_deployment_group + ] = mock_rpc + request = {} + client.get_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
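+ # The transport keys its wrapped (retry/timeout-aware) callables by the raw
+ # stub method in _wrapped_methods, so swapping that entry for mock_rpc lets
+ # the test count client-level invocations without touching gRPC itself.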
+ assert mock_rpc.call_count == 1 + + client.get_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_intercept_deployment_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_intercept_deployment_group + ] = mock_rpc + + request = {} + await client.get_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_group_async( + transport: str = "grpc_asyncio", + request_type=intercept.GetInterceptDeploymentGroupRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptDeploymentGroup( + name="name_value", + network="network_value", + state=intercept.InterceptDeploymentGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + ) + response = await client.get_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.GetInterceptDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, intercept.InterceptDeploymentGroup) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.state == intercept.InterceptDeploymentGroup.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_group_async_from_dict(): + await test_get_intercept_deployment_group_async(request_type=dict) + + +def test_get_intercept_deployment_group_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.GetInterceptDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment_group), "__call__" + ) as call: + call.return_value = intercept.InterceptDeploymentGroup() + client.get_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_group_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.GetInterceptDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptDeploymentGroup() + ) + await client.get_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_intercept_deployment_group_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.InterceptDeploymentGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_intercept_deployment_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
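+ # Flattened keyword arguments are folded into a single request message by the
+ # client before it reaches the transport, which is why the assertions below
+ # inspect args[0].name rather than a keyword argument on the mock call.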
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_intercept_deployment_group_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_intercept_deployment_group( + intercept.GetInterceptDeploymentGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_group_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.InterceptDeploymentGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptDeploymentGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_intercept_deployment_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_group_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_intercept_deployment_group( + intercept.GetInterceptDeploymentGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.CreateInterceptDeploymentGroupRequest, + dict, + ], +) +def test_create_intercept_deployment_group(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.CreateInterceptDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_intercept_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
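+ # (AIP-4235 covers request fields, typically a request_id, that the generated
+ # client fills with a UUID4 when the caller leaves them unset; the request
+ # below sets only the ordinary string fields so the echo assertion would catch
+ # any interference with that auto-population.)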
+ client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.CreateInterceptDeploymentGroupRequest( + parent="parent_value", + intercept_deployment_group_id="intercept_deployment_group_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_intercept_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.CreateInterceptDeploymentGroupRequest( + parent="parent_value", + intercept_deployment_group_id="intercept_deployment_group_id_value", + ) + + +def test_create_intercept_deployment_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_intercept_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_intercept_deployment_group + ] = mock_rpc + request = {} + client.create_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_intercept_deployment_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_intercept_deployment_group + ] = mock_rpc + + request = {} + await client.create_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_group_async( + transport: str = "grpc_asyncio", + request_type=intercept.CreateInterceptDeploymentGroupRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.CreateInterceptDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_group_async_from_dict(): + await test_create_intercept_deployment_group_async(request_type=dict) + + +def test_create_intercept_deployment_group_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
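+ # The client serializes URI path fields into a single "x-goog-request-params"
+ # metadata entry (here "parent=parent_value") so the backend can route the
+ # request; the assertion at the end of this test checks exactly that header.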
+ request = intercept.CreateInterceptDeploymentGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_group_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.CreateInterceptDeploymentGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_intercept_deployment_group_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_intercept_deployment_group( + parent="parent_value", + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + intercept_deployment_group_id="intercept_deployment_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].intercept_deployment_group + mock_val = intercept.InterceptDeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].intercept_deployment_group_id + mock_val = "intercept_deployment_group_id_value" + assert arg == mock_val + + +def test_create_intercept_deployment_group_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_intercept_deployment_group( + intercept.CreateInterceptDeploymentGroupRequest(), + parent="parent_value", + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + intercept_deployment_group_id="intercept_deployment_group_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_group_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_intercept_deployment_group( + parent="parent_value", + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + intercept_deployment_group_id="intercept_deployment_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].intercept_deployment_group + mock_val = intercept.InterceptDeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].intercept_deployment_group_id + mock_val = "intercept_deployment_group_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_group_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_intercept_deployment_group( + intercept.CreateInterceptDeploymentGroupRequest(), + parent="parent_value", + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + intercept_deployment_group_id="intercept_deployment_group_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.UpdateInterceptDeploymentGroupRequest, + dict, + ], +) +def test_update_intercept_deployment_group(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.UpdateInterceptDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
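+ # Update is a long-running operation: the client wraps the mocked
+ # operations_pb2.Operation in a google.api_core future, so only the future
+ # interface is asserted here; resolving that future would yield the updated
+ # InterceptDeploymentGroup.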
+ assert isinstance(response, future.Future) + + +def test_update_intercept_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.UpdateInterceptDeploymentGroupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_intercept_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.UpdateInterceptDeploymentGroupRequest() + + +def test_update_intercept_deployment_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_intercept_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_intercept_deployment_group + ] = mock_rpc + request = {} + client.update_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_intercept_deployment_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_intercept_deployment_group + ] = mock_rpc + + request = {} + await client.update_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_group_async( + transport: str = "grpc_asyncio", + request_type=intercept.UpdateInterceptDeploymentGroupRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.UpdateInterceptDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_group_async_from_dict(): + await test_update_intercept_deployment_group_async(request_type=dict) + + +def test_update_intercept_deployment_group_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
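+ # For Update RPCs the resource name lives on the nested resource message, so
+ # the routing parameter asserted below is keyed as
+ # "intercept_deployment_group.name" rather than a top-level field.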
+ request = intercept.UpdateInterceptDeploymentGroupRequest() + + request.intercept_deployment_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "intercept_deployment_group.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_group_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.UpdateInterceptDeploymentGroupRequest() + + request.intercept_deployment_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "intercept_deployment_group.name=name_value", + ) in kw["metadata"] + + +def test_update_intercept_deployment_group_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_intercept_deployment_group( + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].intercept_deployment_group + mock_val = intercept.InterceptDeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_intercept_deployment_group_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_intercept_deployment_group( + intercept.UpdateInterceptDeploymentGroupRequest(), + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_group_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_intercept_deployment_group( + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].intercept_deployment_group + mock_val = intercept.InterceptDeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_group_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_intercept_deployment_group( + intercept.UpdateInterceptDeploymentGroupRequest(), + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.DeleteInterceptDeploymentGroupRequest, + dict, + ], +) +def test_delete_intercept_deployment_group(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.DeleteInterceptDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_intercept_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.DeleteInterceptDeploymentGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_intercept_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.DeleteInterceptDeploymentGroupRequest( + name="name_value", + ) + + +def test_delete_intercept_deployment_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_intercept_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_intercept_deployment_group + ] = mock_rpc + request = {} + client.delete_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_intercept_deployment_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_intercept_deployment_group + ] = mock_rpc + + request = {} + await client.delete_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_group_async( + transport: str = "grpc_asyncio", + request_type=intercept.DeleteInterceptDeploymentGroupRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.DeleteInterceptDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_group_async_from_dict(): + await test_delete_intercept_deployment_group_async(request_type=dict) + + +def test_delete_intercept_deployment_group_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = intercept.DeleteInterceptDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_group_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.DeleteInterceptDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_intercept_deployment_group_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_intercept_deployment_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_intercept_deployment_group_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_intercept_deployment_group( + intercept.DeleteInterceptDeploymentGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_group_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
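+ # Note that the first assignment below is immediately overwritten: the async
+ # client only ever consumes the awaitable FakeUnaryUnaryCall set on the
+ # second line.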
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_intercept_deployment_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_group_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_intercept_deployment_group( + intercept.DeleteInterceptDeploymentGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.ListInterceptDeploymentsRequest, + dict, + ], +) +def test_list_intercept_deployments(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.ListInterceptDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_intercept_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.ListInterceptDeploymentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInterceptDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_intercept_deployments_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.ListInterceptDeploymentsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_intercept_deployments(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.ListInterceptDeploymentsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_intercept_deployments_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_intercept_deployments + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_intercept_deployments + ] = mock_rpc + request = {} + client.list_intercept_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_intercept_deployments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_intercept_deployments_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_intercept_deployments + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_intercept_deployments + ] = mock_rpc + + request = {} + await client.list_intercept_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_intercept_deployments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_intercept_deployments_async( + transport: str = "grpc_asyncio", + request_type=intercept.ListInterceptDeploymentsRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_intercept_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.ListInterceptDeploymentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInterceptDeploymentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_intercept_deployments_async_from_dict(): + await test_list_intercept_deployments_async(request_type=dict) + + +def test_list_intercept_deployments_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.ListInterceptDeploymentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + call.return_value = intercept.ListInterceptDeploymentsResponse() + client.list_intercept_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_intercept_deployments_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.ListInterceptDeploymentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptDeploymentsResponse() + ) + await client.list_intercept_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_intercept_deployments_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.ListInterceptDeploymentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
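+ # NOTE (editor): this mirrors ordinary use of the flattened signature — an
+ # illustrative sketch only, assuming the v1alpha1 package exports InterceptClient
+ # and using a hypothetical parent path:
+ #   client = network_security_v1alpha1.InterceptClient()
+ #   for deployment in client.list_intercept_deployments(
+ #       parent="projects/my-project/locations/us-central1"
+ #   ):
+ #       print(deployment.name)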
+ client.list_intercept_deployments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_intercept_deployments_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_intercept_deployments( + intercept.ListInterceptDeploymentsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_intercept_deployments_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.ListInterceptDeploymentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptDeploymentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_intercept_deployments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_intercept_deployments_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_intercept_deployments( + intercept.ListInterceptDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_intercept_deployments_pager(transport_name: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + # Set the response to a series of pages. 
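+ # NOTE (editor): each element of side_effect below is consumed by one page
+ # fetch, so the pager walks four pages of 3, 0, 1 and 2 items (6 in total); the
+ # trailing RuntimeError would only fire if the pager wrongly requested another
+ # page after the final response, whose next_page_token is empty.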
+ call.side_effect = ( + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + ], + next_page_token="abc", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[], + next_page_token="def", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_intercept_deployments( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, intercept.InterceptDeployment) for i in results) + + +def test_list_intercept_deployments_pages(transport_name: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + ], + next_page_token="abc", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[], + next_page_token="def", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + ], + ), + RuntimeError, + ) + pages = list(client.list_intercept_deployments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_intercept_deployments_async_pager(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + ], + next_page_token="abc", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[], + next_page_token="def", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_intercept_deployments( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, intercept.InterceptDeployment) for i in responses) + + +@pytest.mark.asyncio +async def test_list_intercept_deployments_async_pages(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + ], + next_page_token="abc", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[], + next_page_token="def", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_intercept_deployments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.GetInterceptDeploymentRequest, + dict, + ], +) +def test_get_intercept_deployment(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
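+ # NOTE (editor): the fields populated on this fake InterceptDeployment are the
+ # same ones asserted on the response below, confirming the client returns name,
+ # forwarding_rule, intercept_deployment_group, state, reconciling and
+ # description unchanged.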
+ call.return_value = intercept.InterceptDeployment( + name="name_value", + forwarding_rule="forwarding_rule_value", + intercept_deployment_group="intercept_deployment_group_value", + state=intercept.InterceptDeployment.State.ACTIVE, + reconciling=True, + description="description_value", + ) + response = client.get_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.GetInterceptDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, intercept.InterceptDeployment) + assert response.name == "name_value" + assert response.forwarding_rule == "forwarding_rule_value" + assert response.intercept_deployment_group == "intercept_deployment_group_value" + assert response.state == intercept.InterceptDeployment.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +def test_get_intercept_deployment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.GetInterceptDeploymentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_intercept_deployment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.GetInterceptDeploymentRequest( + name="name_value", + ) + + +def test_get_intercept_deployment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_intercept_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_intercept_deployment + ] = mock_rpc + request = {} + client.get_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_intercept_deployment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_intercept_deployment + ] = mock_rpc + + request = {} + await client.get_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_async( + transport: str = "grpc_asyncio", + request_type=intercept.GetInterceptDeploymentRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptDeployment( + name="name_value", + forwarding_rule="forwarding_rule_value", + intercept_deployment_group="intercept_deployment_group_value", + state=intercept.InterceptDeployment.State.ACTIVE, + reconciling=True, + description="description_value", + ) + ) + response = await client.get_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.GetInterceptDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, intercept.InterceptDeployment) + assert response.name == "name_value" + assert response.forwarding_rule == "forwarding_rule_value" + assert response.intercept_deployment_group == "intercept_deployment_group_value" + assert response.state == intercept.InterceptDeployment.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_async_from_dict(): + await test_get_intercept_deployment_async(request_type=dict) + + +def test_get_intercept_deployment_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.GetInterceptDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment), "__call__" + ) as call: + call.return_value = intercept.InterceptDeployment() + client.get_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.GetInterceptDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptDeployment() + ) + await client.get_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_intercept_deployment_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.InterceptDeployment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_intercept_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_intercept_deployment_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_intercept_deployment( + intercept.GetInterceptDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = intercept.InterceptDeployment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptDeployment() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_intercept_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_intercept_deployment_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_intercept_deployment( + intercept.GetInterceptDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.CreateInterceptDeploymentRequest, + dict, + ], +) +def test_create_intercept_deployment(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.CreateInterceptDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_intercept_deployment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
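+ # NOTE (editor): per AIP-4235, string fields annotated with a UUID4 format
+ # (typically a request_id) are expected to be auto-populated by the generated
+ # client when left unset, which is why only the remaining plain string fields
+ # are filled in explicitly below.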
+ client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.CreateInterceptDeploymentRequest( + parent="parent_value", + intercept_deployment_id="intercept_deployment_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_intercept_deployment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.CreateInterceptDeploymentRequest( + parent="parent_value", + intercept_deployment_id="intercept_deployment_id_value", + ) + + +def test_create_intercept_deployment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_intercept_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_intercept_deployment + ] = mock_rpc + request = {} + client.create_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_intercept_deployment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_intercept_deployment + ] = mock_rpc + + request = {} + await client.create_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_async( + transport: str = "grpc_asyncio", + request_type=intercept.CreateInterceptDeploymentRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.CreateInterceptDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_async_from_dict(): + await test_create_intercept_deployment_async(request_type=dict) + + +def test_create_intercept_deployment_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.CreateInterceptDeploymentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.CreateInterceptDeploymentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_intercept_deployment_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_intercept_deployment( + parent="parent_value", + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + intercept_deployment_id="intercept_deployment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].intercept_deployment + mock_val = intercept.InterceptDeployment(name="name_value") + assert arg == mock_val + arg = args[0].intercept_deployment_id + mock_val = "intercept_deployment_id_value" + assert arg == mock_val + + +def test_create_intercept_deployment_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_intercept_deployment( + intercept.CreateInterceptDeploymentRequest(), + parent="parent_value", + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + intercept_deployment_id="intercept_deployment_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_intercept_deployment( + parent="parent_value", + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + intercept_deployment_id="intercept_deployment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].intercept_deployment + mock_val = intercept.InterceptDeployment(name="name_value") + assert arg == mock_val + arg = args[0].intercept_deployment_id + mock_val = "intercept_deployment_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_intercept_deployment_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_intercept_deployment( + intercept.CreateInterceptDeploymentRequest(), + parent="parent_value", + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + intercept_deployment_id="intercept_deployment_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.UpdateInterceptDeploymentRequest, + dict, + ], +) +def test_update_intercept_deployment(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.UpdateInterceptDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_intercept_deployment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.UpdateInterceptDeploymentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_intercept_deployment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.UpdateInterceptDeploymentRequest() + + +def test_update_intercept_deployment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_intercept_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_intercept_deployment + ] = mock_rpc + request = {} + client.update_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_intercept_deployment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_intercept_deployment + ] = mock_rpc + + request = {} + await client.update_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_async( + transport: str = "grpc_asyncio", + request_type=intercept.UpdateInterceptDeploymentRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.UpdateInterceptDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_async_from_dict(): + await test_update_intercept_deployment_async(request_type=dict) + + +def test_update_intercept_deployment_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.UpdateInterceptDeploymentRequest() + + request.intercept_deployment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "intercept_deployment.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.UpdateInterceptDeploymentRequest() + + request.intercept_deployment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "intercept_deployment.name=name_value", + ) in kw["metadata"] + + +def test_update_intercept_deployment_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_intercept_deployment( + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].intercept_deployment + mock_val = intercept.InterceptDeployment(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_intercept_deployment_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_intercept_deployment( + intercept.UpdateInterceptDeploymentRequest(), + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_intercept_deployment( + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].intercept_deployment + mock_val = intercept.InterceptDeployment(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_intercept_deployment_flattened_error_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
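+ # NOTE (editor): callers pass either a fully formed request object or the
+ # flattened keyword arguments, never both — an illustrative sketch of the two
+ # valid forms, with a hypothetical update of the description field:
+ #   await client.update_intercept_deployment(
+ #       intercept_deployment=intercept.InterceptDeployment(name=deployment_name),
+ #       update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+ #   )
+ #   # ...or...
+ #   await client.update_intercept_deployment(
+ #       request=intercept.UpdateInterceptDeploymentRequest(...)
+ #   )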
+ with pytest.raises(ValueError): + await client.update_intercept_deployment( + intercept.UpdateInterceptDeploymentRequest(), + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.DeleteInterceptDeploymentRequest, + dict, + ], +) +def test_delete_intercept_deployment(request_type, transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = intercept.DeleteInterceptDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_intercept_deployment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = intercept.DeleteInterceptDeploymentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_intercept_deployment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intercept.DeleteInterceptDeploymentRequest( + name="name_value", + ) + + +def test_delete_intercept_deployment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_intercept_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_intercept_deployment + ] = mock_rpc + request = {} + client.delete_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_intercept_deployment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_intercept_deployment + ] = mock_rpc + + request = {} + await client.delete_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_async( + transport: str = "grpc_asyncio", + request_type=intercept.DeleteInterceptDeploymentRequest, +): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = intercept.DeleteInterceptDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_async_from_dict(): + await test_delete_intercept_deployment_async(request_type=dict) + + +def test_delete_intercept_deployment_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
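+ # NOTE (editor): the client is expected to copy request.name into the
+ # x-goog-request-params metadata entry asserted at the end of this test, which
+ # is what lets the backend route the call based on the resource name.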
+ request = intercept.DeleteInterceptDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intercept.DeleteInterceptDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_intercept_deployment_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_intercept_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_intercept_deployment_flattened_error(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_intercept_deployment( + intercept.DeleteInterceptDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_intercept_deployment_flattened_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
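+ # NOTE (editor): delete_intercept_deployment returns a long-running operation,
+ # so the fake response is an operations_pb2.Operation; the first assignment
+ # below is immediately superseded by the FakeUnaryUnaryCall wrapper the async
+ # client requires. In real use the operation would be resolved with something
+ # like the following sketch (names are hypothetical):
+ #   operation = await client.delete_intercept_deployment(name=deployment_name)
+ #   await operation.result()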
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_intercept_deployment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_intercept_deployment_flattened_error_async():
+    client = InterceptAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_intercept_deployment(
+            intercept.DeleteInterceptDeploymentRequest(),
+            name="name_value",
+        )
+
+
+def test_list_intercept_endpoint_groups_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InterceptClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._transport.list_intercept_endpoint_groups
+            in client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client._transport._wrapped_methods[
+            client._transport.list_intercept_endpoint_groups
+        ] = mock_rpc
+
+        request = {}
+        client.list_intercept_endpoint_groups(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_intercept_endpoint_groups(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_list_intercept_endpoint_groups_rest_required_fields(
+    request_type=intercept.ListInterceptEndpointGroupsRequest,
+):
+    transport_class = transports.InterceptRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).list_intercept_endpoint_groups._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = "parent_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).list_intercept_endpoint_groups._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = intercept.ListInterceptEndpointGroupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.ListInterceptEndpointGroupsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_intercept_endpoint_groups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_intercept_endpoint_groups_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_intercept_endpoint_groups._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_intercept_endpoint_groups_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = intercept.ListInterceptEndpointGroupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = intercept.ListInterceptEndpointGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_intercept_endpoint_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/interceptEndpointGroups" + % client.transport._host, + args[1], + ) + + +def test_list_intercept_endpoint_groups_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_intercept_endpoint_groups( + intercept.ListInterceptEndpointGroupsRequest(), + parent="parent_value", + ) + + +def test_list_intercept_endpoint_groups_rest_pager(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + ], + next_page_token="abc", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[], + next_page_token="def", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptEndpointGroupsResponse( + intercept_endpoint_groups=[ + intercept.InterceptEndpointGroup(), + intercept.InterceptEndpointGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + intercept.ListInterceptEndpointGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_intercept_endpoint_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, intercept.InterceptEndpointGroup) for i in results) + + pages = list( + client.list_intercept_endpoint_groups(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_intercept_endpoint_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_intercept_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_intercept_endpoint_group + ] = mock_rpc + + request = {} + client.get_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_intercept_endpoint_group_rest_required_fields( + request_type=intercept.GetInterceptEndpointGroupRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_intercept_endpoint_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_intercept_endpoint_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptEndpointGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.InterceptEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_intercept_endpoint_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_intercept_endpoint_group_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_intercept_endpoint_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_intercept_endpoint_group_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptEndpointGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = intercept.InterceptEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_intercept_endpoint_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/interceptEndpointGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_intercept_endpoint_group_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_intercept_endpoint_group( + intercept.GetInterceptEndpointGroupRequest(), + name="name_value", + ) + + +def test_create_intercept_endpoint_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_intercept_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_intercept_endpoint_group + ] = mock_rpc + + request = {} + client.create_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_intercept_endpoint_group_rest_required_fields( + request_type=intercept.CreateInterceptEndpointGroupRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["intercept_endpoint_group_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "interceptEndpointGroupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_intercept_endpoint_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "interceptEndpointGroupId" in jsonified_request + assert ( + jsonified_request["interceptEndpointGroupId"] + == request_init["intercept_endpoint_group_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["interceptEndpointGroupId"] = "intercept_endpoint_group_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_intercept_endpoint_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "intercept_endpoint_group_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "interceptEndpointGroupId" in jsonified_request + assert ( + jsonified_request["interceptEndpointGroupId"] + == "intercept_endpoint_group_id_value" + ) + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_intercept_endpoint_group(request) + + expected_params = [ + ( + "interceptEndpointGroupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_intercept_endpoint_group_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_intercept_endpoint_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "interceptEndpointGroupId", + "requestId", + ) + ) + & set( + ( + "parent", + "interceptEndpointGroupId", + "interceptEndpointGroup", + ) + ) + ) + + +def test_create_intercept_endpoint_group_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_intercept_endpoint_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/interceptEndpointGroups" + % client.transport._host, + args[1], + ) + + +def test_create_intercept_endpoint_group_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_intercept_endpoint_group( + intercept.CreateInterceptEndpointGroupRequest(), + parent="parent_value", + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + intercept_endpoint_group_id="intercept_endpoint_group_id_value", + ) + + +def test_update_intercept_endpoint_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_intercept_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_intercept_endpoint_group + ] = mock_rpc + + request = {} + client.update_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_intercept_endpoint_group_rest_required_fields( + request_type=intercept.UpdateInterceptEndpointGroupRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_intercept_endpoint_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_intercept_endpoint_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_intercept_endpoint_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_intercept_endpoint_group_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_intercept_endpoint_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("interceptEndpointGroup",)) + ) + + +def test_update_intercept_endpoint_group_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "intercept_endpoint_group": { + "name": "projects/sample1/locations/sample2/interceptEndpointGroups/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_intercept_endpoint_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{intercept_endpoint_group.name=projects/*/locations/*/interceptEndpointGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_intercept_endpoint_group_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_intercept_endpoint_group( + intercept.UpdateInterceptEndpointGroupRequest(), + intercept_endpoint_group=intercept.InterceptEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_intercept_endpoint_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_intercept_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_intercept_endpoint_group + ] = mock_rpc + + request = {} + client.delete_intercept_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_intercept_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_intercept_endpoint_group_rest_required_fields( + request_type=intercept.DeleteInterceptEndpointGroupRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_intercept_endpoint_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_intercept_endpoint_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_intercept_endpoint_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_intercept_endpoint_group_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_intercept_endpoint_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_intercept_endpoint_group_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_intercept_endpoint_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/interceptEndpointGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_intercept_endpoint_group_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_intercept_endpoint_group( + intercept.DeleteInterceptEndpointGroupRequest(), + name="name_value", + ) + + +def test_list_intercept_endpoint_group_associations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_intercept_endpoint_group_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_intercept_endpoint_group_associations + ] = mock_rpc + + request = {} + client.list_intercept_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_intercept_endpoint_group_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_intercept_endpoint_group_associations_rest_required_fields( + request_type=intercept.ListInterceptEndpointGroupAssociationsRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_intercept_endpoint_group_associations._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_intercept_endpoint_group_associations._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = intercept.ListInterceptEndpointGroupAssociationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.ListInterceptEndpointGroupAssociationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_intercept_endpoint_group_associations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_intercept_endpoint_group_associations_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_intercept_endpoint_group_associations._get_unset_required_fields( + {} + ) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_intercept_endpoint_group_associations_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.ListInterceptEndpointGroupAssociationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = intercept.ListInterceptEndpointGroupAssociationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_intercept_endpoint_group_associations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/interceptEndpointGroupAssociations" + % client.transport._host, + args[1], + ) + + +def test_list_intercept_endpoint_group_associations_rest_flattened_error( + transport: str = "rest", +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_intercept_endpoint_group_associations( + intercept.ListInterceptEndpointGroupAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_intercept_endpoint_group_associations_rest_pager(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + ], + next_page_token="abc", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[], + next_page_token="def", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptEndpointGroupAssociationsResponse( + intercept_endpoint_group_associations=[ + intercept.InterceptEndpointGroupAssociation(), + intercept.InterceptEndpointGroupAssociation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + intercept.ListInterceptEndpointGroupAssociationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_intercept_endpoint_group_associations( + request=sample_request + ) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, intercept.InterceptEndpointGroupAssociation) for i in results + ) + + pages = list( + client.list_intercept_endpoint_group_associations( + request=sample_request + ).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_intercept_endpoint_group_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_intercept_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_intercept_endpoint_group_association + ] = mock_rpc + + request = {} + client.get_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_intercept_endpoint_group_association_rest_required_fields( + request_type=intercept.GetInterceptEndpointGroupAssociationRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_intercept_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_intercept_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptEndpointGroupAssociation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.InterceptEndpointGroupAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_intercept_endpoint_group_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_intercept_endpoint_group_association_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.get_intercept_endpoint_group_association._get_unset_required_fields( + {} + ) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_intercept_endpoint_group_association_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptEndpointGroupAssociation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroupAssociations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = intercept.InterceptEndpointGroupAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_intercept_endpoint_group_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/interceptEndpointGroupAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_intercept_endpoint_group_association_rest_flattened_error( + transport: str = "rest", +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_intercept_endpoint_group_association( + intercept.GetInterceptEndpointGroupAssociationRequest(), + name="name_value", + ) + + +def test_create_intercept_endpoint_group_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_intercept_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_intercept_endpoint_group_association + ] = mock_rpc + + request = {} + client.create_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_intercept_endpoint_group_association_rest_required_fields( + request_type=intercept.CreateInterceptEndpointGroupAssociationRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_intercept_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_intercept_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "intercept_endpoint_group_association_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_intercept_endpoint_group_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_intercept_endpoint_group_association_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_intercept_endpoint_group_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "interceptEndpointGroupAssociationId", + "requestId", + ) + ) + & set( + ( + "parent", + "interceptEndpointGroupAssociation", + ) + ) + ) + + +def test_create_intercept_endpoint_group_association_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + intercept_endpoint_group_association_id="intercept_endpoint_group_association_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_intercept_endpoint_group_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/interceptEndpointGroupAssociations" + % client.transport._host, + args[1], + ) + + +def test_create_intercept_endpoint_group_association_rest_flattened_error( + transport: str = "rest", +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_intercept_endpoint_group_association( + intercept.CreateInterceptEndpointGroupAssociationRequest(), + parent="parent_value", + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + intercept_endpoint_group_association_id="intercept_endpoint_group_association_id_value", + ) + + +def test_update_intercept_endpoint_group_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_intercept_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_intercept_endpoint_group_association + ] = mock_rpc + + request = {} + client.update_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_intercept_endpoint_group_association_rest_required_fields( + request_type=intercept.UpdateInterceptEndpointGroupAssociationRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_intercept_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_intercept_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. 
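+ # Any fields still reported as unset here should only be the optional
+ # query-string parameters for this method (request_id and update_mask);
+ # path parameters and body fields must never leak into this set.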
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_intercept_endpoint_group_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_intercept_endpoint_group_association_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_intercept_endpoint_group_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("interceptEndpointGroupAssociation",)) + ) + + +def test_update_intercept_endpoint_group_association_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "intercept_endpoint_group_association": { + "name": "projects/sample1/locations/sample2/interceptEndpointGroupAssociations/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_intercept_endpoint_group_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{intercept_endpoint_group_association.name=projects/*/locations/*/interceptEndpointGroupAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_intercept_endpoint_group_association_rest_flattened_error( + transport: str = "rest", +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_intercept_endpoint_group_association( + intercept.UpdateInterceptEndpointGroupAssociationRequest(), + intercept_endpoint_group_association=intercept.InterceptEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_intercept_endpoint_group_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_intercept_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_intercept_endpoint_group_association + ] = mock_rpc + + request = {} + client.delete_intercept_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_intercept_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_intercept_endpoint_group_association_rest_required_fields( + request_type=intercept.DeleteInterceptEndpointGroupAssociationRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_intercept_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_intercept_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_intercept_endpoint_group_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_intercept_endpoint_group_association_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_intercept_endpoint_group_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_intercept_endpoint_group_association_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroupAssociations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_intercept_endpoint_group_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/interceptEndpointGroupAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_intercept_endpoint_group_association_rest_flattened_error( + transport: str = "rest", +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_intercept_endpoint_group_association( + intercept.DeleteInterceptEndpointGroupAssociationRequest(), + name="name_value", + ) + + +def test_list_intercept_deployment_groups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_intercept_deployment_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_intercept_deployment_groups + ] = mock_rpc + + request = {} + client.list_intercept_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_intercept_deployment_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_intercept_deployment_groups_rest_required_fields( + request_type=intercept.ListInterceptDeploymentGroupsRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_intercept_deployment_groups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_intercept_deployment_groups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = intercept.ListInterceptDeploymentGroupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
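+ # path_template.transcode() normally expands this method's HTTP rule
+ # ("v1alpha1/{parent=projects/*/locations/*}/interceptDeploymentGroups")
+ # into a concrete URI; stubbing it with a fixed "v1/sample_method" path
+ # lets the test focus on query-parameter handling alone.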
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.ListInterceptDeploymentGroupsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_intercept_deployment_groups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_intercept_deployment_groups_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_intercept_deployment_groups._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_intercept_deployment_groups_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.ListInterceptDeploymentGroupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = intercept.ListInterceptDeploymentGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_intercept_deployment_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/interceptDeploymentGroups" + % client.transport._host, + args[1], + ) + + +def test_list_intercept_deployment_groups_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_intercept_deployment_groups( + intercept.ListInterceptDeploymentGroupsRequest(), + parent="parent_value", + ) + + +def test_list_intercept_deployment_groups_rest_pager(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + ], + next_page_token="abc", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[], + next_page_token="def", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptDeploymentGroupsResponse( + intercept_deployment_groups=[ + intercept.InterceptDeploymentGroup(), + intercept.InterceptDeploymentGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + intercept.ListInterceptDeploymentGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_intercept_deployment_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, intercept.InterceptDeploymentGroup) for i in results) + + pages = list( + client.list_intercept_deployment_groups(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_intercept_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_intercept_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_intercept_deployment_group + ] = mock_rpc + + request = {} + client.get_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
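+ # (For the REST transport there is no real gRPC stub; the call is routed
+ # through the cached wrapped method that was swapped for mock_rpc above,
+ # which is what the call counter verifies.)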
+ assert mock_rpc.call_count == 1 + + client.get_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_intercept_deployment_group_rest_required_fields( + request_type=intercept.GetInterceptDeploymentGroupRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_intercept_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_intercept_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptDeploymentGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.InterceptDeploymentGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_intercept_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_intercept_deployment_group_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_intercept_deployment_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_intercept_deployment_group_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptDeploymentGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/interceptDeploymentGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = intercept.InterceptDeploymentGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_intercept_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/interceptDeploymentGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_intercept_deployment_group_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_intercept_deployment_group( + intercept.GetInterceptDeploymentGroupRequest(), + name="name_value", + ) + + +def test_create_intercept_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_intercept_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_intercept_deployment_group + ] = mock_rpc + + request = {} + client.create_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_intercept_deployment_group_rest_required_fields( + request_type=intercept.CreateInterceptDeploymentGroupRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["intercept_deployment_group_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "interceptDeploymentGroupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_intercept_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "interceptDeploymentGroupId" in jsonified_request + assert ( + jsonified_request["interceptDeploymentGroupId"] + == request_init["intercept_deployment_group_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "interceptDeploymentGroupId" + ] = "intercept_deployment_group_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_intercept_deployment_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "intercept_deployment_group_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "interceptDeploymentGroupId" in jsonified_request + assert ( + jsonified_request["interceptDeploymentGroupId"] + == "intercept_deployment_group_id_value" + ) + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
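+ # Because intercept_deployment_group_id is required but left at its
+ # default, it is expected to surface as an empty
+ # "interceptDeploymentGroupId" query parameter alongside the standard
+ # "$alt" system parameter checked further down.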
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_intercept_deployment_group(request) + + expected_params = [ + ( + "interceptDeploymentGroupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_intercept_deployment_group_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.create_intercept_deployment_group._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "interceptDeploymentGroupId", + "requestId", + ) + ) + & set( + ( + "parent", + "interceptDeploymentGroupId", + "interceptDeploymentGroup", + ) + ) + ) + + +def test_create_intercept_deployment_group_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + intercept_deployment_group_id="intercept_deployment_group_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_intercept_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/interceptDeploymentGroups" + % client.transport._host, + args[1], + ) + + +def test_create_intercept_deployment_group_rest_flattened_error( + transport: str = "rest", +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_intercept_deployment_group( + intercept.CreateInterceptDeploymentGroupRequest(), + parent="parent_value", + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + intercept_deployment_group_id="intercept_deployment_group_id_value", + ) + + +def test_update_intercept_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_intercept_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_intercept_deployment_group + ] = mock_rpc + + request = {} + client.update_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_intercept_deployment_group_rest_required_fields( + request_type=intercept.UpdateInterceptDeploymentGroupRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_intercept_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_intercept_deployment_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_intercept_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_intercept_deployment_group_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.update_intercept_deployment_group._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("interceptDeploymentGroup",)) + ) + + +def test_update_intercept_deployment_group_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "intercept_deployment_group": { + "name": "projects/sample1/locations/sample2/interceptDeploymentGroups/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_intercept_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{intercept_deployment_group.name=projects/*/locations/*/interceptDeploymentGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_intercept_deployment_group_rest_flattened_error( + transport: str = "rest", +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_intercept_deployment_group( + intercept.UpdateInterceptDeploymentGroupRequest(), + intercept_deployment_group=intercept.InterceptDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_intercept_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_intercept_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_intercept_deployment_group + ] = mock_rpc + + request = {} + client.delete_intercept_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_intercept_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_intercept_deployment_group_rest_required_fields( + request_type=intercept.DeleteInterceptDeploymentGroupRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_intercept_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_intercept_deployment_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_intercept_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_intercept_deployment_group_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.delete_intercept_deployment_group._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_intercept_deployment_group_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/interceptDeploymentGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_intercept_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/interceptDeploymentGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_intercept_deployment_group_rest_flattened_error( + transport: str = "rest", +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_intercept_deployment_group( + intercept.DeleteInterceptDeploymentGroupRequest(), + name="name_value", + ) + + +def test_list_intercept_deployments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_intercept_deployments + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_intercept_deployments + ] = mock_rpc + + request = {} + client.list_intercept_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_intercept_deployments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_intercept_deployments_rest_required_fields( + request_type=intercept.ListInterceptDeploymentsRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_intercept_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_intercept_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = intercept.ListInterceptDeploymentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.ListInterceptDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_intercept_deployments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_intercept_deployments_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_intercept_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_intercept_deployments_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.ListInterceptDeploymentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = intercept.ListInterceptDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_intercept_deployments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/interceptDeployments" + % client.transport._host, + args[1], + ) + + +def test_list_intercept_deployments_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_intercept_deployments( + intercept.ListInterceptDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_intercept_deployments_rest_pager(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
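+ # The pager test below fakes four pages (3 + 0 + 1 + 2 items): iterating
+ # the pager follows next_page_token lazily, one HTTP call per page, and
+ # should yield 6 InterceptDeployment items in total.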
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + ], + next_page_token="abc", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[], + next_page_token="def", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + ], + next_page_token="ghi", + ), + intercept.ListInterceptDeploymentsResponse( + intercept_deployments=[ + intercept.InterceptDeployment(), + intercept.InterceptDeployment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + intercept.ListInterceptDeploymentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_intercept_deployments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, intercept.InterceptDeployment) for i in results) + + pages = list(client.list_intercept_deployments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_intercept_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_intercept_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_intercept_deployment + ] = mock_rpc + + request = {} + client.get_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_intercept_deployment_rest_required_fields( + request_type=intercept.GetInterceptDeploymentRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_intercept_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_intercept_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptDeployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.InterceptDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_intercept_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_intercept_deployment_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_intercept_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_intercept_deployment_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptDeployment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/interceptDeployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = intercept.InterceptDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_intercept_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/interceptDeployments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_intercept_deployment_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_intercept_deployment( + intercept.GetInterceptDeploymentRequest(), + name="name_value", + ) + + +def test_create_intercept_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_intercept_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_intercept_deployment + ] = mock_rpc + + request = {} + client.create_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_intercept_deployment_rest_required_fields( + request_type=intercept.CreateInterceptDeploymentRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["intercept_deployment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "interceptDeploymentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_intercept_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "interceptDeploymentId" in jsonified_request + assert ( + jsonified_request["interceptDeploymentId"] + == request_init["intercept_deployment_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["interceptDeploymentId"] = "intercept_deployment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_intercept_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "intercept_deployment_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "interceptDeploymentId" in jsonified_request + assert jsonified_request["interceptDeploymentId"] == "intercept_deployment_id_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_intercept_deployment(request) + + expected_params = [ + ( + "interceptDeploymentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_intercept_deployment_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_intercept_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "interceptDeploymentId", + "requestId", + ) + ) + & set( + ( + "parent", + "interceptDeploymentId", + "interceptDeployment", + ) + ) + ) + + +def test_create_intercept_deployment_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + intercept_deployment_id="intercept_deployment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_intercept_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/interceptDeployments" + % client.transport._host, + args[1], + ) + + +def test_create_intercept_deployment_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_intercept_deployment( + intercept.CreateInterceptDeploymentRequest(), + parent="parent_value", + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + intercept_deployment_id="intercept_deployment_id_value", + ) + + +def test_update_intercept_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_intercept_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_intercept_deployment + ] = mock_rpc + + request = {} + client.update_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_intercept_deployment_rest_required_fields( + request_type=intercept.UpdateInterceptDeploymentRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_intercept_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_intercept_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_intercept_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_intercept_deployment_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_intercept_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("interceptDeployment",)) + ) + + +def test_update_intercept_deployment_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "intercept_deployment": { + "name": "projects/sample1/locations/sample2/interceptDeployments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_intercept_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{intercept_deployment.name=projects/*/locations/*/interceptDeployments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_intercept_deployment_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_intercept_deployment( + intercept.UpdateInterceptDeploymentRequest(), + intercept_deployment=intercept.InterceptDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_intercept_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_intercept_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_intercept_deployment + ] = mock_rpc + + request = {} + client.delete_intercept_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_intercept_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_intercept_deployment_rest_required_fields( + request_type=intercept.DeleteInterceptDeploymentRequest, +): + transport_class = transports.InterceptRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_intercept_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_intercept_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_intercept_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_intercept_deployment_rest_unset_required_fields(): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_intercept_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_intercept_deployment_rest_flattened(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/interceptDeployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_intercept_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/interceptDeployments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_intercept_deployment_rest_flattened_error(transport: str = "rest"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_intercept_deployment( + intercept.DeleteInterceptDeploymentRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.InterceptGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.InterceptGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterceptClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.InterceptGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InterceptClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InterceptClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.InterceptGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterceptClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.InterceptGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InterceptClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.InterceptGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.InterceptGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InterceptGrpcTransport, + transports.InterceptGrpcAsyncIOTransport, + transports.InterceptRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = InterceptClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_intercept_endpoint_groups_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + call.return_value = intercept.ListInterceptEndpointGroupsResponse() + client.list_intercept_endpoint_groups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptEndpointGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_intercept_endpoint_group_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = intercept.InterceptEndpointGroup() + client.get_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_intercept_endpoint_group_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_intercept_endpoint_group_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_intercept_endpoint_group_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_intercept_endpoint_group_associations_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + call.return_value = intercept.ListInterceptEndpointGroupAssociationsResponse() + client.list_intercept_endpoint_group_associations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptEndpointGroupAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_intercept_endpoint_group_association_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = intercept.InterceptEndpointGroupAssociation() + client.get_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_intercept_endpoint_group_association_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_intercept_endpoint_group_association_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_intercept_endpoint_group_association_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_intercept_deployment_groups_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + call.return_value = intercept.ListInterceptDeploymentGroupsResponse() + client.list_intercept_deployment_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptDeploymentGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_intercept_deployment_group_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment_group), "__call__" + ) as call: + call.return_value = intercept.InterceptDeploymentGroup() + client.get_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_intercept_deployment_group_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_intercept_deployment_group_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_intercept_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_intercept_deployment_group_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_intercept_deployments_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + call.return_value = intercept.ListInterceptDeploymentsResponse() + client.list_intercept_deployments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptDeploymentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_intercept_deployment_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment), "__call__" + ) as call: + call.return_value = intercept.InterceptDeployment() + client.get_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_intercept_deployment_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_intercept_deployment_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_intercept_deployment_empty_call_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptDeploymentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = InterceptAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_intercept_endpoint_groups_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptEndpointGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_intercept_endpoint_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptEndpointGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptEndpointGroup( + name="name_value", + intercept_deployment_group="intercept_deployment_group_value", + state=intercept.InterceptEndpointGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + ) + await client.get_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_intercept_endpoint_group_associations_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptEndpointGroupAssociationsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_intercept_endpoint_group_associations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptEndpointGroupAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_intercept_endpoint_group_association_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptEndpointGroupAssociation( + name="name_value", + intercept_endpoint_group="intercept_endpoint_group_value", + network="network_value", + state=intercept.InterceptEndpointGroupAssociation.State.ACTIVE, + reconciling=True, + ) + ) + await client.get_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_intercept_endpoint_group_association_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_intercept_endpoint_group_association_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_intercept_endpoint_group_association_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_intercept_deployment_groups_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptDeploymentGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_intercept_deployment_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptDeploymentGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_intercept_deployment_group_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptDeploymentGroup( + name="name_value", + network="network_value", + state=intercept.InterceptDeploymentGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + ) + await client.get_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_intercept_deployment_group_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_intercept_deployment_group_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_intercept_deployment_group_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_intercept_deployments_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.ListInterceptDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_intercept_deployments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptDeploymentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_intercept_deployment_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + intercept.InterceptDeployment( + name="name_value", + forwarding_rule="forwarding_rule_value", + intercept_deployment_group="intercept_deployment_group_value", + state=intercept.InterceptDeployment.State.ACTIVE, + reconciling=True, + description="description_value", + ) + ) + await client.get_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_intercept_deployment_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_intercept_deployment_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_intercept_deployment_empty_call_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptDeploymentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = InterceptClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_intercept_endpoint_groups_rest_bad_request( + request_type=intercept.ListInterceptEndpointGroupsRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_intercept_endpoint_groups(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.ListInterceptEndpointGroupsRequest, + dict, + ], +) +def test_list_intercept_endpoint_groups_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = intercept.ListInterceptEndpointGroupsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.ListInterceptEndpointGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_intercept_endpoint_groups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInterceptEndpointGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_intercept_endpoint_groups_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InterceptRestInterceptor, "post_list_intercept_endpoint_groups" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_list_intercept_endpoint_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_list_intercept_endpoint_groups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.ListInterceptEndpointGroupsRequest.pb( + intercept.ListInterceptEndpointGroupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = intercept.ListInterceptEndpointGroupsResponse.to_json( + intercept.ListInterceptEndpointGroupsResponse() + ) + req.return_value.content = return_value + + request = intercept.ListInterceptEndpointGroupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = intercept.ListInterceptEndpointGroupsResponse() + post_with_metadata.return_value = ( + intercept.ListInterceptEndpointGroupsResponse(), + metadata, + ) + + client.list_intercept_endpoint_groups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_intercept_endpoint_group_rest_bad_request( + request_type=intercept.GetInterceptEndpointGroupRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_intercept_endpoint_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.GetInterceptEndpointGroupRequest, + dict, + ], +) +def test_get_intercept_endpoint_group_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptEndpointGroup( + name="name_value", + intercept_deployment_group="intercept_deployment_group_value", + state=intercept.InterceptEndpointGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.InterceptEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_intercept_endpoint_group(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, intercept.InterceptEndpointGroup) + assert response.name == "name_value" + assert response.intercept_deployment_group == "intercept_deployment_group_value" + assert response.state == intercept.InterceptEndpointGroup.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_intercept_endpoint_group_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InterceptRestInterceptor, "post_get_intercept_endpoint_group" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_get_intercept_endpoint_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_get_intercept_endpoint_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.GetInterceptEndpointGroupRequest.pb( + intercept.GetInterceptEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = intercept.InterceptEndpointGroup.to_json( + intercept.InterceptEndpointGroup() + ) + req.return_value.content = return_value + + request = intercept.GetInterceptEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = intercept.InterceptEndpointGroup() + post_with_metadata.return_value = intercept.InterceptEndpointGroup(), metadata + + client.get_intercept_endpoint_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_intercept_endpoint_group_rest_bad_request( + request_type=intercept.CreateInterceptEndpointGroupRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_intercept_endpoint_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.CreateInterceptEndpointGroupRequest, + dict, + ], +) +def test_create_intercept_endpoint_group_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["intercept_endpoint_group"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "intercept_deployment_group": "intercept_deployment_group_value", + "connected_deployment_group": { + "name": "name_value", + "locations": [{"location": "location_value", "state": 1}], + }, + "state": 1, + "reconciling": True, + "associations": [ + {"name": "name_value", "network": "network_value", "state": 1} + ], + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = intercept.CreateInterceptEndpointGroupRequest.meta.fields[ + "intercept_endpoint_group" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "intercept_endpoint_group" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["intercept_endpoint_group"][field])): + del request_init["intercept_endpoint_group"][field][i][subfield] + else: + del request_init["intercept_endpoint_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_intercept_endpoint_group(request) + + # Establish that the response is the type that we expect. 
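+    # A minimal explicit check, assuming the REST LRO path wraps the mocked Operation
+    # proto in a google.api_core.operation.Operation future (``operation`` is already
+    # imported and used by the interceptor tests in this module):
+    assert isinstance(response, operation.Operation)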
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_intercept_endpoint_group_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, "post_create_intercept_endpoint_group" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_create_intercept_endpoint_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_create_intercept_endpoint_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.CreateInterceptEndpointGroupRequest.pb( + intercept.CreateInterceptEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.CreateInterceptEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_intercept_endpoint_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_intercept_endpoint_group_rest_bad_request( + request_type=intercept.UpdateInterceptEndpointGroupRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "intercept_endpoint_group": { + "name": "projects/sample1/locations/sample2/interceptEndpointGroups/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_intercept_endpoint_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.UpdateInterceptEndpointGroupRequest, + dict, + ], +) +def test_update_intercept_endpoint_group_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "intercept_endpoint_group": { + "name": "projects/sample1/locations/sample2/interceptEndpointGroups/sample3" + } + } + request_init["intercept_endpoint_group"] = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroups/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "intercept_deployment_group": "intercept_deployment_group_value", + "connected_deployment_group": { + "name": "name_value", + "locations": [{"location": "location_value", "state": 1}], + }, + "state": 1, + "reconciling": True, + "associations": [ + {"name": "name_value", "network": "network_value", "state": 1} + ], + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = intercept.UpdateInterceptEndpointGroupRequest.meta.fields[ + "intercept_endpoint_group" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "intercept_endpoint_group" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["intercept_endpoint_group"][field])): + del request_init["intercept_endpoint_group"][field][i][subfield] + else: + del request_init["intercept_endpoint_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_intercept_endpoint_group(request) + + # Establish that the response is the type that we expect. 
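+    # Minimal explicit check that the LRO response is an api_core Operation future
+    # (same assumption as the create test above):
+    assert isinstance(response, operation.Operation)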
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_intercept_endpoint_group_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, "post_update_intercept_endpoint_group" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_update_intercept_endpoint_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_update_intercept_endpoint_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.UpdateInterceptEndpointGroupRequest.pb( + intercept.UpdateInterceptEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.UpdateInterceptEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_intercept_endpoint_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_intercept_endpoint_group_rest_bad_request( + request_type=intercept.DeleteInterceptEndpointGroupRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_intercept_endpoint_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.DeleteInterceptEndpointGroupRequest, + dict, + ], +) +def test_delete_intercept_endpoint_group_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_intercept_endpoint_group(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_intercept_endpoint_group_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, "post_delete_intercept_endpoint_group" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_delete_intercept_endpoint_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_delete_intercept_endpoint_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.DeleteInterceptEndpointGroupRequest.pb( + intercept.DeleteInterceptEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.DeleteInterceptEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_intercept_endpoint_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_intercept_endpoint_group_associations_rest_bad_request( + request_type=intercept.ListInterceptEndpointGroupAssociationsRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_intercept_endpoint_group_associations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.ListInterceptEndpointGroupAssociationsRequest, + dict, + ], +) +def test_list_intercept_endpoint_group_associations_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.ListInterceptEndpointGroupAssociationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.ListInterceptEndpointGroupAssociationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_intercept_endpoint_group_associations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInterceptEndpointGroupAssociationsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_intercept_endpoint_group_associations_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InterceptRestInterceptor, + "post_list_intercept_endpoint_group_associations", + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_list_intercept_endpoint_group_associations_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, + "pre_list_intercept_endpoint_group_associations", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.ListInterceptEndpointGroupAssociationsRequest.pb( + intercept.ListInterceptEndpointGroupAssociationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = intercept.ListInterceptEndpointGroupAssociationsResponse.to_json( + intercept.ListInterceptEndpointGroupAssociationsResponse() + ) + req.return_value.content = return_value + + request = intercept.ListInterceptEndpointGroupAssociationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = intercept.ListInterceptEndpointGroupAssociationsResponse() + post_with_metadata.return_value = ( + intercept.ListInterceptEndpointGroupAssociationsResponse(), + metadata, + ) + + client.list_intercept_endpoint_group_associations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_intercept_endpoint_group_association_rest_bad_request( + request_type=intercept.GetInterceptEndpointGroupAssociationRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroupAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_intercept_endpoint_group_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.GetInterceptEndpointGroupAssociationRequest, + dict, + ], +) +def test_get_intercept_endpoint_group_association_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroupAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptEndpointGroupAssociation( + name="name_value", + intercept_endpoint_group="intercept_endpoint_group_value", + network="network_value", + state=intercept.InterceptEndpointGroupAssociation.State.ACTIVE, + reconciling=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.InterceptEndpointGroupAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_intercept_endpoint_group_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, intercept.InterceptEndpointGroupAssociation) + assert response.name == "name_value" + assert response.intercept_endpoint_group == "intercept_endpoint_group_value" + assert response.network == "network_value" + assert response.state == intercept.InterceptEndpointGroupAssociation.State.ACTIVE + assert response.reconciling is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_intercept_endpoint_group_association_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InterceptRestInterceptor, + "post_get_intercept_endpoint_group_association", + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_get_intercept_endpoint_group_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, + "pre_get_intercept_endpoint_group_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.GetInterceptEndpointGroupAssociationRequest.pb( + intercept.GetInterceptEndpointGroupAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = intercept.InterceptEndpointGroupAssociation.to_json( + intercept.InterceptEndpointGroupAssociation() + ) + req.return_value.content = return_value + + request = intercept.GetInterceptEndpointGroupAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = intercept.InterceptEndpointGroupAssociation() + post_with_metadata.return_value = ( + intercept.InterceptEndpointGroupAssociation(), + metadata, + ) + + client.get_intercept_endpoint_group_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_intercept_endpoint_group_association_rest_bad_request( + request_type=intercept.CreateInterceptEndpointGroupAssociationRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_intercept_endpoint_group_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.CreateInterceptEndpointGroupAssociationRequest, + dict, + ], +) +def test_create_intercept_endpoint_group_association_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["intercept_endpoint_group_association"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "intercept_endpoint_group": "intercept_endpoint_group_value", + "network": "network_value", + "locations_details": [{"location": "location_value", "state": 1}], + "state": 1, + "reconciling": True, + "locations": [{"location": "location_value", "state": 1}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = intercept.CreateInterceptEndpointGroupAssociationRequest.meta.fields[ + "intercept_endpoint_group_association" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "intercept_endpoint_group_association" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["intercept_endpoint_group_association"][field]) + ): + del request_init["intercept_endpoint_group_association"][field][i][ + subfield + ] + else: + del request_init["intercept_endpoint_group_association"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_intercept_endpoint_group_association(request) + + # Establish that the response is the type that we expect. 
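+    # Minimal explicit check that the LRO response is an api_core Operation future
+    # (same assumption as the endpoint-group LRO tests above):
+    assert isinstance(response, operation.Operation)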
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_intercept_endpoint_group_association_rest_interceptors( + null_interceptor, +): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, + "post_create_intercept_endpoint_group_association", + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_create_intercept_endpoint_group_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, + "pre_create_intercept_endpoint_group_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.CreateInterceptEndpointGroupAssociationRequest.pb( + intercept.CreateInterceptEndpointGroupAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.CreateInterceptEndpointGroupAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_intercept_endpoint_group_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_intercept_endpoint_group_association_rest_bad_request( + request_type=intercept.UpdateInterceptEndpointGroupAssociationRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "intercept_endpoint_group_association": { + "name": "projects/sample1/locations/sample2/interceptEndpointGroupAssociations/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_intercept_endpoint_group_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.UpdateInterceptEndpointGroupAssociationRequest, + dict, + ], +) +def test_update_intercept_endpoint_group_association_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "intercept_endpoint_group_association": { + "name": "projects/sample1/locations/sample2/interceptEndpointGroupAssociations/sample3" + } + } + request_init["intercept_endpoint_group_association"] = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroupAssociations/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "intercept_endpoint_group": "intercept_endpoint_group_value", + "network": "network_value", + "locations_details": [{"location": "location_value", "state": 1}], + "state": 1, + "reconciling": True, + "locations": [{"location": "location_value", "state": 1}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = intercept.UpdateInterceptEndpointGroupAssociationRequest.meta.fields[ + "intercept_endpoint_group_association" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "intercept_endpoint_group_association" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["intercept_endpoint_group_association"][field]) + ): + del request_init["intercept_endpoint_group_association"][field][i][ + subfield + ] + else: + del request_init["intercept_endpoint_group_association"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_intercept_endpoint_group_association(request) + + # Establish that the response is the type that we expect. 
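+ # The update RPC returns a long-running operations_pb2.Operation, so the test only
+ # round-trips the mocked Operation through MessageToJson here rather than
+ # asserting individual resource fields.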
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_intercept_endpoint_group_association_rest_interceptors( + null_interceptor, +): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, + "post_update_intercept_endpoint_group_association", + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_update_intercept_endpoint_group_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, + "pre_update_intercept_endpoint_group_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.UpdateInterceptEndpointGroupAssociationRequest.pb( + intercept.UpdateInterceptEndpointGroupAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.UpdateInterceptEndpointGroupAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_intercept_endpoint_group_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_intercept_endpoint_group_association_rest_bad_request( + request_type=intercept.DeleteInterceptEndpointGroupAssociationRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroupAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_intercept_endpoint_group_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.DeleteInterceptEndpointGroupAssociationRequest, + dict, + ], +) +def test_delete_intercept_endpoint_group_association_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptEndpointGroupAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_intercept_endpoint_group_association(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_intercept_endpoint_group_association_rest_interceptors( + null_interceptor, +): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, + "post_delete_intercept_endpoint_group_association", + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_delete_intercept_endpoint_group_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, + "pre_delete_intercept_endpoint_group_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.DeleteInterceptEndpointGroupAssociationRequest.pb( + intercept.DeleteInterceptEndpointGroupAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.DeleteInterceptEndpointGroupAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_intercept_endpoint_group_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_intercept_deployment_groups_rest_bad_request( + request_type=intercept.ListInterceptDeploymentGroupsRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_intercept_deployment_groups(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.ListInterceptDeploymentGroupsRequest, + dict, + ], +) +def test_list_intercept_deployment_groups_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.ListInterceptDeploymentGroupsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.ListInterceptDeploymentGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_intercept_deployment_groups(request) + + # Establish that the response is the type that we expect. 
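+ # The list call wraps the raw ListInterceptDeploymentGroupsResponse in a pager;
+ # attribute access such as `next_page_token` is delegated to the underlying
+ # response object.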
+ assert isinstance(response, pagers.ListInterceptDeploymentGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_intercept_deployment_groups_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InterceptRestInterceptor, "post_list_intercept_deployment_groups" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_list_intercept_deployment_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_list_intercept_deployment_groups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.ListInterceptDeploymentGroupsRequest.pb( + intercept.ListInterceptDeploymentGroupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = intercept.ListInterceptDeploymentGroupsResponse.to_json( + intercept.ListInterceptDeploymentGroupsResponse() + ) + req.return_value.content = return_value + + request = intercept.ListInterceptDeploymentGroupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = intercept.ListInterceptDeploymentGroupsResponse() + post_with_metadata.return_value = ( + intercept.ListInterceptDeploymentGroupsResponse(), + metadata, + ) + + client.list_intercept_deployment_groups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_intercept_deployment_group_rest_bad_request( + request_type=intercept.GetInterceptDeploymentGroupRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptDeploymentGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_intercept_deployment_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.GetInterceptDeploymentGroupRequest, + dict, + ], +) +def test_get_intercept_deployment_group_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptDeploymentGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptDeploymentGroup( + name="name_value", + network="network_value", + state=intercept.InterceptDeploymentGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.InterceptDeploymentGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_intercept_deployment_group(request) + + # Establish that the response is the type that we expect. 
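+ # The get RPC returns the InterceptDeploymentGroup resource itself, so each field
+ # populated on the mocked response can be asserted directly.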
+ assert isinstance(response, intercept.InterceptDeploymentGroup) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.state == intercept.InterceptDeploymentGroup.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_intercept_deployment_group_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InterceptRestInterceptor, "post_get_intercept_deployment_group" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_get_intercept_deployment_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_get_intercept_deployment_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.GetInterceptDeploymentGroupRequest.pb( + intercept.GetInterceptDeploymentGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = intercept.InterceptDeploymentGroup.to_json( + intercept.InterceptDeploymentGroup() + ) + req.return_value.content = return_value + + request = intercept.GetInterceptDeploymentGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = intercept.InterceptDeploymentGroup() + post_with_metadata.return_value = intercept.InterceptDeploymentGroup(), metadata + + client.get_intercept_deployment_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_intercept_deployment_group_rest_bad_request( + request_type=intercept.CreateInterceptDeploymentGroupRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_intercept_deployment_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.CreateInterceptDeploymentGroupRequest, + dict, + ], +) +def test_create_intercept_deployment_group_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["intercept_deployment_group"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "network": "network_value", + "connected_endpoint_groups": [{"name": "name_value"}], + "nested_deployments": [{"name": "name_value", "state": 1}], + "state": 1, + "reconciling": True, + "description": "description_value", + "locations": [{"location": "location_value", "state": 1}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = intercept.CreateInterceptDeploymentGroupRequest.meta.fields[ + "intercept_deployment_group" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "intercept_deployment_group" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["intercept_deployment_group"][field]) + ): + del request_init["intercept_deployment_group"][field][i][subfield] + else: + del request_init["intercept_deployment_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_intercept_deployment_group(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_intercept_deployment_group_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, "post_create_intercept_deployment_group" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_create_intercept_deployment_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_create_intercept_deployment_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.CreateInterceptDeploymentGroupRequest.pb( + intercept.CreateInterceptDeploymentGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.CreateInterceptDeploymentGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_intercept_deployment_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_intercept_deployment_group_rest_bad_request( + request_type=intercept.UpdateInterceptDeploymentGroupRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "intercept_deployment_group": { + "name": "projects/sample1/locations/sample2/interceptDeploymentGroups/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_intercept_deployment_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.UpdateInterceptDeploymentGroupRequest, + dict, + ], +) +def test_update_intercept_deployment_group_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "intercept_deployment_group": { + "name": "projects/sample1/locations/sample2/interceptDeploymentGroups/sample3" + } + } + request_init["intercept_deployment_group"] = { + "name": "projects/sample1/locations/sample2/interceptDeploymentGroups/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "network": "network_value", + "connected_endpoint_groups": [{"name": "name_value"}], + "nested_deployments": [{"name": "name_value", "state": 1}], + "state": 1, + "reconciling": True, + "description": "description_value", + "locations": [{"location": "location_value", "state": 1}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = intercept.UpdateInterceptDeploymentGroupRequest.meta.fields[ + "intercept_deployment_group" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "intercept_deployment_group" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["intercept_deployment_group"][field]) + ): + del request_init["intercept_deployment_group"][field][i][subfield] + else: + del request_init["intercept_deployment_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_intercept_deployment_group(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_intercept_deployment_group_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, "post_update_intercept_deployment_group" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_update_intercept_deployment_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_update_intercept_deployment_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.UpdateInterceptDeploymentGroupRequest.pb( + intercept.UpdateInterceptDeploymentGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.UpdateInterceptDeploymentGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_intercept_deployment_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_intercept_deployment_group_rest_bad_request( + request_type=intercept.DeleteInterceptDeploymentGroupRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptDeploymentGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_intercept_deployment_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.DeleteInterceptDeploymentGroupRequest, + dict, + ], +) +def test_delete_intercept_deployment_group_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptDeploymentGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_intercept_deployment_group(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_intercept_deployment_group_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, "post_delete_intercept_deployment_group" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_delete_intercept_deployment_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_delete_intercept_deployment_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.DeleteInterceptDeploymentGroupRequest.pb( + intercept.DeleteInterceptDeploymentGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.DeleteInterceptDeploymentGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + 
post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_intercept_deployment_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_intercept_deployments_rest_bad_request( + request_type=intercept.ListInterceptDeploymentsRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_intercept_deployments(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.ListInterceptDeploymentsRequest, + dict, + ], +) +def test_list_intercept_deployments_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.ListInterceptDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.ListInterceptDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_intercept_deployments(request) + + # Establish that the response is the type that we expect. 
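+ # Besides the page token, ListInterceptDeploymentsResponse carries `unreachable`
+ # (typically the locations that could not be queried); the pager exposes it
+ # unchanged.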
+ assert isinstance(response, pagers.ListInterceptDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_intercept_deployments_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InterceptRestInterceptor, "post_list_intercept_deployments" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_list_intercept_deployments_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_list_intercept_deployments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.ListInterceptDeploymentsRequest.pb( + intercept.ListInterceptDeploymentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = intercept.ListInterceptDeploymentsResponse.to_json( + intercept.ListInterceptDeploymentsResponse() + ) + req.return_value.content = return_value + + request = intercept.ListInterceptDeploymentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = intercept.ListInterceptDeploymentsResponse() + post_with_metadata.return_value = ( + intercept.ListInterceptDeploymentsResponse(), + metadata, + ) + + client.list_intercept_deployments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_intercept_deployment_rest_bad_request( + request_type=intercept.GetInterceptDeploymentRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptDeployments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_intercept_deployment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.GetInterceptDeploymentRequest, + dict, + ], +) +def test_get_intercept_deployment_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptDeployments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = intercept.InterceptDeployment( + name="name_value", + forwarding_rule="forwarding_rule_value", + intercept_deployment_group="intercept_deployment_group_value", + state=intercept.InterceptDeployment.State.ACTIVE, + reconciling=True, + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = intercept.InterceptDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_intercept_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, intercept.InterceptDeployment) + assert response.name == "name_value" + assert response.forwarding_rule == "forwarding_rule_value" + assert response.intercept_deployment_group == "intercept_deployment_group_value" + assert response.state == intercept.InterceptDeployment.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_intercept_deployment_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InterceptRestInterceptor, "post_get_intercept_deployment" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_get_intercept_deployment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_get_intercept_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.GetInterceptDeploymentRequest.pb( + intercept.GetInterceptDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = intercept.InterceptDeployment.to_json( + intercept.InterceptDeployment() + ) + req.return_value.content = return_value + + request = intercept.GetInterceptDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = intercept.InterceptDeployment() + post_with_metadata.return_value = intercept.InterceptDeployment(), metadata + + client.get_intercept_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_intercept_deployment_rest_bad_request( + request_type=intercept.CreateInterceptDeploymentRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_intercept_deployment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.CreateInterceptDeploymentRequest, + dict, + ], +) +def test_create_intercept_deployment_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["intercept_deployment"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "forwarding_rule": "forwarding_rule_value", + "intercept_deployment_group": "intercept_deployment_group_value", + "state": 1, + "reconciling": True, + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = intercept.CreateInterceptDeploymentRequest.meta.fields[ + "intercept_deployment" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "intercept_deployment" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["intercept_deployment"][field])): + del request_init["intercept_deployment"][field][i][subfield] + else: + del request_init["intercept_deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_intercept_deployment(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_intercept_deployment_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, "post_create_intercept_deployment" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_create_intercept_deployment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_create_intercept_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.CreateInterceptDeploymentRequest.pb( + intercept.CreateInterceptDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.CreateInterceptDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_intercept_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_intercept_deployment_rest_bad_request( + request_type=intercept.UpdateInterceptDeploymentRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "intercept_deployment": { + "name": "projects/sample1/locations/sample2/interceptDeployments/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_intercept_deployment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.UpdateInterceptDeploymentRequest, + dict, + ], +) +def test_update_intercept_deployment_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "intercept_deployment": { + "name": "projects/sample1/locations/sample2/interceptDeployments/sample3" + } + } + request_init["intercept_deployment"] = { + "name": "projects/sample1/locations/sample2/interceptDeployments/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "forwarding_rule": "forwarding_rule_value", + "intercept_deployment_group": "intercept_deployment_group_value", + "state": 1, + "reconciling": True, + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = intercept.UpdateInterceptDeploymentRequest.meta.fields[ + "intercept_deployment" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "intercept_deployment" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["intercept_deployment"][field])): + del request_init["intercept_deployment"][field][i][subfield] + else: + del request_init["intercept_deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_intercept_deployment(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_intercept_deployment_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, "post_update_intercept_deployment" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_update_intercept_deployment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_update_intercept_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.UpdateInterceptDeploymentRequest.pb( + intercept.UpdateInterceptDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.UpdateInterceptDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_intercept_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_intercept_deployment_rest_bad_request( + request_type=intercept.DeleteInterceptDeploymentRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptDeployments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_intercept_deployment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + intercept.DeleteInterceptDeploymentRequest, + dict, + ], +) +def test_delete_intercept_deployment_rest_call_success(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/interceptDeployments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_intercept_deployment(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_intercept_deployment_rest_interceptors(null_interceptor): + transport = transports.InterceptRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterceptRestInterceptor(), + ) + client = InterceptClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InterceptRestInterceptor, "post_delete_intercept_deployment" + ) as post, mock.patch.object( + transports.InterceptRestInterceptor, + "post_delete_intercept_deployment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.InterceptRestInterceptor, "pre_delete_intercept_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = intercept.DeleteInterceptDeploymentRequest.pb( + intercept.DeleteInterceptDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = intercept.DeleteInterceptDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_intercept_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_intercept_endpoint_groups_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_groups), "__call__" + ) as call: + client.list_intercept_endpoint_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptEndpointGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_intercept_endpoint_group_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_endpoint_group), "__call__" + ) as call: + client.get_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_intercept_endpoint_group_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group), "__call__" + ) as call: + client.create_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_intercept_endpoint_group_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group), "__call__" + ) as call: + client.update_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_intercept_endpoint_group_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group), "__call__" + ) as call: + client.delete_intercept_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_intercept_endpoint_group_associations_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_endpoint_group_associations), "__call__" + ) as call: + client.list_intercept_endpoint_group_associations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptEndpointGroupAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_intercept_endpoint_group_association_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_intercept_endpoint_group_association), "__call__" + ) as call: + client.get_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_intercept_endpoint_group_association_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_endpoint_group_association), "__call__" + ) as call: + client.create_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_intercept_endpoint_group_association_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_endpoint_group_association), "__call__" + ) as call: + client.update_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_intercept_endpoint_group_association_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_endpoint_group_association), "__call__" + ) as call: + client.delete_intercept_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_intercept_deployment_groups_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_intercept_deployment_groups), "__call__" + ) as call: + client.list_intercept_deployment_groups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptDeploymentGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_intercept_deployment_group_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment_group), "__call__" + ) as call: + client.get_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_intercept_deployment_group_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment_group), "__call__" + ) as call: + client.create_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_intercept_deployment_group_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment_group), "__call__" + ) as call: + client.update_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_intercept_deployment_group_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment_group), "__call__" + ) as call: + client.delete_intercept_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_intercept_deployments_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_intercept_deployments), "__call__" + ) as call: + client.list_intercept_deployments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.ListInterceptDeploymentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_intercept_deployment_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_intercept_deployment), "__call__" + ) as call: + client.get_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.GetInterceptDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_intercept_deployment_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_intercept_deployment), "__call__" + ) as call: + client.create_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.CreateInterceptDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_intercept_deployment_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_intercept_deployment), "__call__" + ) as call: + client.update_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.UpdateInterceptDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_intercept_deployment_empty_call_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_intercept_deployment), "__call__" + ) as call: + client.delete_intercept_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = intercept.DeleteInterceptDeploymentRequest() + + assert args[0] == request_msg + + +def test_intercept_rest_lro_client(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.InterceptGrpcTransport, + ) + + +def test_intercept_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InterceptTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_intercept_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.network_security_v1alpha1.services.intercept.transports.InterceptTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.InterceptTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_intercept_endpoint_groups", + "get_intercept_endpoint_group", + "create_intercept_endpoint_group", + "update_intercept_endpoint_group", + "delete_intercept_endpoint_group", + "list_intercept_endpoint_group_associations", + "get_intercept_endpoint_group_association", + "create_intercept_endpoint_group_association", + "update_intercept_endpoint_group_association", + "delete_intercept_endpoint_group_association", + "list_intercept_deployment_groups", + "get_intercept_deployment_group", + "create_intercept_deployment_group", + "update_intercept_deployment_group", + "delete_intercept_deployment_group", + "list_intercept_deployments", + "get_intercept_deployment", + "create_intercept_deployment", + "update_intercept_deployment", + "delete_intercept_deployment", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_intercept_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.network_security_v1alpha1.services.intercept.transports.InterceptTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InterceptTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + 
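+
+# A minimal usage sketch (illustrative only; assumes the package-level
+# ``InterceptClient`` export shown elsewhere in this patch):
+# ``google.auth.load_credentials_from_file`` is the same hook the transport
+# calls when only ``credentials_file`` is supplied, so the equivalent flow
+# outside of a test would be roughly:
+#
+#     import google.auth
+#     from google.cloud.network_security_v1alpha1 import InterceptClient
+#
+#     # Load service-account credentials from disk with the cloud-platform scope.
+#     creds, _ = google.auth.load_credentials_from_file(
+#         "credentials.json",
+#         scopes=["https://www.googleapis.com/auth/cloud-platform"],
+#     )
+#     client = InterceptClient(credentials=creds)
+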
+ +def test_intercept_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.network_security_v1alpha1.services.intercept.transports.InterceptTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InterceptTransport() + adc.assert_called_once() + + +def test_intercept_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InterceptClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InterceptGrpcTransport, + transports.InterceptGrpcAsyncIOTransport, + ], +) +def test_intercept_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InterceptGrpcTransport, + transports.InterceptGrpcAsyncIOTransport, + transports.InterceptRestTransport, + ], +) +def test_intercept_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.InterceptGrpcTransport, grpc_helpers), + (transports.InterceptGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_intercept_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.InterceptGrpcTransport, transports.InterceptGrpcAsyncIOTransport], +) +def test_intercept_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_intercept_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.InterceptRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_intercept_host_no_port(transport_name): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_intercept_host_with_port(transport_name): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + 
"networksecurity.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_intercept_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InterceptClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InterceptClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_intercept_endpoint_groups._session + session2 = client2.transport.list_intercept_endpoint_groups._session + assert session1 != session2 + session1 = client1.transport.get_intercept_endpoint_group._session + session2 = client2.transport.get_intercept_endpoint_group._session + assert session1 != session2 + session1 = client1.transport.create_intercept_endpoint_group._session + session2 = client2.transport.create_intercept_endpoint_group._session + assert session1 != session2 + session1 = client1.transport.update_intercept_endpoint_group._session + session2 = client2.transport.update_intercept_endpoint_group._session + assert session1 != session2 + session1 = client1.transport.delete_intercept_endpoint_group._session + session2 = client2.transport.delete_intercept_endpoint_group._session + assert session1 != session2 + session1 = client1.transport.list_intercept_endpoint_group_associations._session + session2 = client2.transport.list_intercept_endpoint_group_associations._session + assert session1 != session2 + session1 = client1.transport.get_intercept_endpoint_group_association._session + session2 = client2.transport.get_intercept_endpoint_group_association._session + assert session1 != session2 + session1 = client1.transport.create_intercept_endpoint_group_association._session + session2 = client2.transport.create_intercept_endpoint_group_association._session + assert session1 != session2 + session1 = client1.transport.update_intercept_endpoint_group_association._session + session2 = client2.transport.update_intercept_endpoint_group_association._session + assert session1 != session2 + session1 = client1.transport.delete_intercept_endpoint_group_association._session + session2 = client2.transport.delete_intercept_endpoint_group_association._session + assert session1 != session2 + session1 = client1.transport.list_intercept_deployment_groups._session + session2 = client2.transport.list_intercept_deployment_groups._session + assert session1 != session2 + session1 = client1.transport.get_intercept_deployment_group._session + session2 = client2.transport.get_intercept_deployment_group._session + assert session1 != session2 + session1 = client1.transport.create_intercept_deployment_group._session + session2 = client2.transport.create_intercept_deployment_group._session + assert session1 != session2 + session1 = client1.transport.update_intercept_deployment_group._session + session2 = client2.transport.update_intercept_deployment_group._session + assert session1 != session2 + session1 = client1.transport.delete_intercept_deployment_group._session + session2 = client2.transport.delete_intercept_deployment_group._session + assert session1 != session2 + session1 = client1.transport.list_intercept_deployments._session + session2 = client2.transport.list_intercept_deployments._session + assert session1 != session2 + session1 = client1.transport.get_intercept_deployment._session + session2 = 
client2.transport.get_intercept_deployment._session + assert session1 != session2 + session1 = client1.transport.create_intercept_deployment._session + session2 = client2.transport.create_intercept_deployment._session + assert session1 != session2 + session1 = client1.transport.update_intercept_deployment._session + session2 = client2.transport.update_intercept_deployment._session + assert session1 != session2 + session1 = client1.transport.delete_intercept_deployment._session + session2 = client2.transport.delete_intercept_deployment._session + assert session1 != session2 + + +def test_intercept_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.InterceptGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_intercept_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.InterceptGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.InterceptGrpcTransport, transports.InterceptGrpcAsyncIOTransport], +) +def test_intercept_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.InterceptGrpcTransport, transports.InterceptGrpcAsyncIOTransport], +) +def test_intercept_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_intercept_grpc_lro_client(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_intercept_grpc_lro_async_client(): + client = InterceptAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_forwarding_rule_path(): + project = "squid" + forwarding_rule = "clam" + expected = "projects/{project}/global/forwardingRules/{forwarding_rule}".format( + project=project, + forwarding_rule=forwarding_rule, + ) + actual = InterceptClient.forwarding_rule_path(project, forwarding_rule) + assert expected == actual + + +def test_parse_forwarding_rule_path(): + expected = { + "project": "whelk", + "forwarding_rule": "octopus", + } + path = InterceptClient.forwarding_rule_path(**expected) + + # Check that the path construction is reversible. + actual = InterceptClient.parse_forwarding_rule_path(path) + assert expected == actual + + +def test_intercept_deployment_path(): + project = "oyster" + location = "nudibranch" + intercept_deployment = "cuttlefish" + expected = "projects/{project}/locations/{location}/interceptDeployments/{intercept_deployment}".format( + project=project, + location=location, + intercept_deployment=intercept_deployment, + ) + actual = InterceptClient.intercept_deployment_path( + project, location, intercept_deployment + ) + assert expected == actual + + +def test_parse_intercept_deployment_path(): + expected = { + "project": "mussel", + "location": "winkle", + "intercept_deployment": "nautilus", + } + path = InterceptClient.intercept_deployment_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InterceptClient.parse_intercept_deployment_path(path) + assert expected == actual + + +def test_intercept_deployment_group_path(): + project = "scallop" + location = "abalone" + intercept_deployment_group = "squid" + expected = "projects/{project}/locations/{location}/interceptDeploymentGroups/{intercept_deployment_group}".format( + project=project, + location=location, + intercept_deployment_group=intercept_deployment_group, + ) + actual = InterceptClient.intercept_deployment_group_path( + project, location, intercept_deployment_group + ) + assert expected == actual + + +def test_parse_intercept_deployment_group_path(): + expected = { + "project": "clam", + "location": "whelk", + "intercept_deployment_group": "octopus", + } + path = InterceptClient.intercept_deployment_group_path(**expected) + + # Check that the path construction is reversible. + actual = InterceptClient.parse_intercept_deployment_group_path(path) + assert expected == actual + + +def test_intercept_endpoint_group_path(): + project = "oyster" + location = "nudibranch" + intercept_endpoint_group = "cuttlefish" + expected = "projects/{project}/locations/{location}/interceptEndpointGroups/{intercept_endpoint_group}".format( + project=project, + location=location, + intercept_endpoint_group=intercept_endpoint_group, + ) + actual = InterceptClient.intercept_endpoint_group_path( + project, location, intercept_endpoint_group + ) + assert expected == actual + + +def test_parse_intercept_endpoint_group_path(): + expected = { + "project": "mussel", + "location": "winkle", + "intercept_endpoint_group": "nautilus", + } + path = InterceptClient.intercept_endpoint_group_path(**expected) + + # Check that the path construction is reversible. + actual = InterceptClient.parse_intercept_endpoint_group_path(path) + assert expected == actual + + +def test_intercept_endpoint_group_association_path(): + project = "scallop" + location = "abalone" + intercept_endpoint_group_association = "squid" + expected = "projects/{project}/locations/{location}/interceptEndpointGroupAssociations/{intercept_endpoint_group_association}".format( + project=project, + location=location, + intercept_endpoint_group_association=intercept_endpoint_group_association, + ) + actual = InterceptClient.intercept_endpoint_group_association_path( + project, location, intercept_endpoint_group_association + ) + assert expected == actual + + +def test_parse_intercept_endpoint_group_association_path(): + expected = { + "project": "clam", + "location": "whelk", + "intercept_endpoint_group_association": "octopus", + } + path = InterceptClient.intercept_endpoint_group_association_path(**expected) + + # Check that the path construction is reversible. + actual = InterceptClient.parse_intercept_endpoint_group_association_path(path) + assert expected == actual + + +def test_network_path(): + project = "oyster" + network = "nudibranch" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = InterceptClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "cuttlefish", + "network": "mussel", + } + path = InterceptClient.network_path(**expected) + + # Check that the path construction is reversible. 
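# All of the *_path helpers exercised in this block are pure classmethods: one
# direction formats the components into a full resource name, the other parses
# the name back into a dict. A compact round-trip sketch with placeholder IDs
# (the IDs are illustrative, not values the generated tests rely on):
_sketch_name = InterceptClient.intercept_deployment_path(
    "example-project", "us-central1", "example-deployment"
)
assert _sketch_name == (
    "projects/example-project/locations/us-central1/"
    "interceptDeployments/example-deployment"
)
assert InterceptClient.parse_intercept_deployment_path(_sketch_name) == {
    "project": "example-project",
    "location": "us-central1",
    "intercept_deployment": "example-deployment",
}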
+ actual = InterceptClient.parse_network_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = InterceptClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = InterceptClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = InterceptClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = InterceptClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = InterceptClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InterceptClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = InterceptClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = InterceptClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InterceptClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = InterceptClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = InterceptClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = InterceptClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = InterceptClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = InterceptClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InterceptClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.InterceptTransport, "_prep_wrapped_messages" + ) as prep: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.InterceptTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = InterceptClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
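# Every mock_calls entry is a (name, args, kwargs) triple, so the unpacking
# below pulls out the keyword arguments that the GAPIC layer handed to the
# patched gRPC stub; the "x-goog-request-params" routing header the API
# frontend uses to dispatch the request must appear in that metadata list.
# A minimal stdlib-only sketch of the same bookkeeping, using throwaway names
# rather than anything defined in this test:
from unittest import mock as _mock_sketch

_stub = _mock_sketch.Mock()
_stub("payload", metadata=[("x-goog-request-params", "name=locations")])
_name, _args, _kwargs = _stub.mock_calls[0]
assert ("x-goog-request-params", "name=locations") in _kwargs["metadata"]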
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
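# The async variants cannot return the bare response object: the async client
# awaits the stub, so the patched __call__ has to yield something awaitable.
# grpc_helpers_async.FakeUnaryUnaryCall wraps a plain value in an
# already-completed call whose await resolves to that value. A rough stdlib
# alternative would be to patch with an AsyncMock instead; this sketch reuses
# the `client` and dict-request shape from this test and is shown only for
# illustration, it is not what the generated tests do:
#
#     with mock.patch.object(
#         type(client.transport.cancel_operation),
#         "__call__",
#         new_callable=mock.AsyncMock,
#         return_value=None,
#     ) as call:
#         await client.cancel_operation(request={"name": "locations"})
#         call.assert_called()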
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
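# "Empty request" is meant literally: a default-constructed proto3 message
# holds only default field values and serializes to zero bytes, so these mixin
# tests can exercise the call plumbing without building realistic payloads.
# A quick self-contained check of that property:
assert locations_pb2.ListLocationsRequest().SerializeToString() == b""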
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = InterceptClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = InterceptAsyncClient(credentials=async_anonymous_credentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
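# The dict-form requests below pass a policy_pb2.Policy directly inside a plain
# dict, and the version/etag asserts earlier in this block use 774 and
# b"etag_blob" only as arbitrary sentinels. In real usage the etag is what
# turns set_iam_policy into a safe read-modify-write, because the service
# rejects the write if the policy changed after it was read. A hedged sketch of
# that pattern (`client` and `resource_name` are hypothetical stand-ins, and
# nothing below is executed by these unit tests):
#
#     policy = client.get_iam_policy(request={"resource": resource_name})
#     policy.bindings.add(
#         role="roles/viewer", members=["user:alice@example.com"]
#     )
#     client.set_iam_policy(
#         request={"resource": resource_name, "policy": policy}
#     )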
+ call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = InterceptAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = InterceptClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
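# These context-manager tests pin down the cleanup contract: leaving a `with`
# block must close the underlying gRPC channel or HTTP session. In application
# code the same contract looks like this sketch (illustrative only; real calls
# would need real credentials rather than the anonymous ones used here):
#
#     client = InterceptClient(credentials=ga_credentials.AnonymousCredentials())
#     with client:
#         ...  # issue RPCs here
#     # on exit, client.transport.close() has been called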
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (InterceptClient, transports.InterceptGrpcTransport), + (InterceptAsyncClient, transports.InterceptGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_mirroring.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_mirroring.py new file mode 100644 index 000000000000..e41300312e7e --- /dev/null +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_mirroring.py @@ -0,0 +1,21206 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
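# The new module below follows the same layout as test_intercept.py above: a
# shim that falls back to the external `mock` package on Pythons older than
# 3.8, a probe for google.auth.aio so the async tests can use real asynchronous
# anonymous credentials when they are available, and anonymous credentials
# throughout so the tests never need ADC or a live endpoint.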
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.mirroring import ( + MirroringAsyncClient, + MirroringClient, + pagers, + transports, +) +from google.cloud.network_security_v1alpha1.types import common, mirroring + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
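+# For example, assuming the default template resembles
+# "networksecurity.{UNIVERSE_DOMAIN}", a localhost default would be replaced
+# with "test.{UNIVERSE_DOMAIN}" so the tests can tell the regular and mTLS
+# endpoints apart; otherwise the template is returned unchanged.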
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MirroringClient._get_default_mtls_endpoint(None) is None + assert MirroringClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + MirroringClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MirroringClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MirroringClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert MirroringClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert MirroringClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MirroringClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MirroringClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + MirroringClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MirroringClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MirroringClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MirroringClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MirroringClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MirroringClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert MirroringClient._get_client_cert_source(None, False) is None + assert ( + MirroringClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + MirroringClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MirroringClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + 
assert ( + MirroringClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + MirroringClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MirroringClient), +) +@mock.patch.object( + MirroringAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MirroringAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MirroringClient._DEFAULT_UNIVERSE + default_endpoint = MirroringClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = MirroringClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + MirroringClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MirroringClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MirroringClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MirroringClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + MirroringClient._get_api_endpoint(None, None, default_universe, "always") + == MirroringClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MirroringClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MirroringClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MirroringClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + MirroringClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + MirroringClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + MirroringClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MirroringClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + MirroringClient._get_universe_domain(None, None) + == MirroringClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + MirroringClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
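+    # Taken together: an explicit client-supplied universe domain wins over
+    # the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment value, which in turn wins
+    # over the library default, and an empty string is rejected outright.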
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MirroringClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MirroringClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MirroringClient, "grpc"), + (MirroringAsyncClient, "grpc_asyncio"), + (MirroringClient, "rest"), + ], +) +def test_mirroring_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MirroringGrpcTransport, "grpc"), + (transports.MirroringGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MirroringRestTransport, "rest"), + ], +) +def test_mirroring_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MirroringClient, "grpc"), + (MirroringAsyncClient, "grpc_asyncio"), + (MirroringClient, "rest"), + ], +) +def test_mirroring_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", 
transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +def test_mirroring_client_get_transport_class(): + transport = MirroringClient.get_transport_class() + available_transports = [ + transports.MirroringGrpcTransport, + transports.MirroringRestTransport, + ] + assert transport in available_transports + + transport = MirroringClient.get_transport_class("grpc") + assert transport == transports.MirroringGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MirroringClient, transports.MirroringGrpcTransport, "grpc"), + ( + MirroringAsyncClient, + transports.MirroringGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (MirroringClient, transports.MirroringRestTransport, "rest"), + ], +) +@mock.patch.object( + MirroringClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MirroringClient), +) +@mock.patch.object( + MirroringAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MirroringAsyncClient), +) +def test_mirroring_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MirroringClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MirroringClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (MirroringClient, transports.MirroringGrpcTransport, "grpc", "true"), + ( + MirroringAsyncClient, + transports.MirroringGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (MirroringClient, transports.MirroringGrpcTransport, "grpc", "false"), + ( + MirroringAsyncClient, + transports.MirroringGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (MirroringClient, transports.MirroringRestTransport, "rest", "true"), + (MirroringClient, transports.MirroringRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + MirroringClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MirroringClient), +) +@mock.patch.object( + MirroringAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(MirroringAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_mirroring_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
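+    # With no certificate available from either source, the client is
+    # expected to stay on the regular (non-mTLS) endpoint even though
+    # GOOGLE_API_USE_MTLS_ENDPOINT is left at "auto".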
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [MirroringClient, MirroringAsyncClient]) +@mock.patch.object( + MirroringClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MirroringClient) +) +@mock.patch.object( + MirroringAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MirroringAsyncClient), +) +def test_mirroring_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [MirroringClient, MirroringAsyncClient]) +@mock.patch.object( + MirroringClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MirroringClient), +) +@mock.patch.object( + MirroringAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MirroringAsyncClient), +) +def test_mirroring_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MirroringClient._DEFAULT_UNIVERSE + default_endpoint = MirroringClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = MirroringClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MirroringClient, transports.MirroringGrpcTransport, "grpc"), + ( + MirroringAsyncClient, + transports.MirroringGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (MirroringClient, transports.MirroringRestTransport, "rest"), + ], +) +def test_mirroring_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (MirroringClient, transports.MirroringGrpcTransport, "grpc", grpc_helpers), + ( + MirroringAsyncClient, + transports.MirroringGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (MirroringClient, transports.MirroringRestTransport, "rest", None), + ], +) +def test_mirroring_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_mirroring_client_client_options_from_dict(): + with mock.patch( + "google.cloud.network_security_v1alpha1.services.mirroring.transports.MirroringGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MirroringClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (MirroringClient, transports.MirroringGrpcTransport, "grpc", grpc_helpers), + ( + MirroringAsyncClient, + transports.MirroringGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_mirroring_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
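+    # load_credentials_from_file and create_channel are patched below so the
+    # test can verify that the file-based credentials (rather than ADC) are
+    # handed to the gRPC channel together with the cloud-platform default
+    # scope.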
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.ListMirroringEndpointGroupsRequest, + dict, + ], +) +def test_list_mirroring_endpoint_groups(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.ListMirroringEndpointGroupsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_mirroring_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.ListMirroringEndpointGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMirroringEndpointGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_mirroring_endpoint_groups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.ListMirroringEndpointGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_mirroring_endpoint_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.ListMirroringEndpointGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_mirroring_endpoint_groups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_mirroring_endpoint_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_mirroring_endpoint_groups + ] = mock_rpc + request = {} + client.list_mirroring_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_mirroring_endpoint_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_mirroring_endpoint_groups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_mirroring_endpoint_groups + ] = mock_rpc + + request = {} + await client.list_mirroring_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_mirroring_endpoint_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_groups_async( + transport: str = "grpc_asyncio", + request_type=mirroring.ListMirroringEndpointGroupsRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringEndpointGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_mirroring_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.ListMirroringEndpointGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMirroringEndpointGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_groups_async_from_dict(): + await test_list_mirroring_endpoint_groups_async(request_type=dict) + + +def test_list_mirroring_endpoint_groups_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.ListMirroringEndpointGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + call.return_value = mirroring.ListMirroringEndpointGroupsResponse() + client.list_mirroring_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_groups_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.ListMirroringEndpointGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringEndpointGroupsResponse() + ) + await client.list_mirroring_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_mirroring_endpoint_groups_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
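+        # The "flattened" calling form exercised here passes individual
+        # keyword arguments (e.g. parent=...) rather than a request object;
+        # the client is expected to assemble the request from them, as the
+        # argument checks show.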
+ call.return_value = mirroring.ListMirroringEndpointGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_mirroring_endpoint_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_mirroring_endpoint_groups_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_mirroring_endpoint_groups( + mirroring.ListMirroringEndpointGroupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_groups_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.ListMirroringEndpointGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringEndpointGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_mirroring_endpoint_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_groups_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_mirroring_endpoint_groups( + mirroring.ListMirroringEndpointGroupsRequest(), + parent="parent_value", + ) + + +def test_list_mirroring_endpoint_groups_pager(transport_name: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + # Set the response to a series of pages. 
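+        # Four fake pages are queued via side_effect (3 + 0 + 1 + 2
+        # resources); iterating the returned pager is expected to follow
+        # next_page_token across pages transparently, yielding all six
+        # MirroringEndpointGroup messages without ever reaching the trailing
+        # RuntimeError sentinel.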
+ call.side_effect = ( + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[], + next_page_token="def", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_mirroring_endpoint_groups( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, mirroring.MirroringEndpointGroup) for i in results) + + +def test_list_mirroring_endpoint_groups_pages(transport_name: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[], + next_page_token="def", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_mirroring_endpoint_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_groups_async_pager(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
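+        # Same page fixtures as above, but consumed with `async for`; the
+        # async pager should surface the first next_page_token and yield the
+        # same six resources across the four pages.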
+ call.side_effect = ( + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[], + next_page_token="def", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_mirroring_endpoint_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, mirroring.MirroringEndpointGroup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_groups_async_pages(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[], + next_page_token="def", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_mirroring_endpoint_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.GetMirroringEndpointGroupRequest, + dict, + ], +) +def test_get_mirroring_endpoint_group(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mirroring.MirroringEndpointGroup( + name="name_value", + mirroring_deployment_group="mirroring_deployment_group_value", + state=mirroring.MirroringEndpointGroup.State.ACTIVE, + reconciling=True, + type_=mirroring.MirroringEndpointGroup.Type.DIRECT, + description="description_value", + ) + response = client.get_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.GetMirroringEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, mirroring.MirroringEndpointGroup) + assert response.name == "name_value" + assert response.mirroring_deployment_group == "mirroring_deployment_group_value" + assert response.state == mirroring.MirroringEndpointGroup.State.ACTIVE + assert response.reconciling is True + assert response.type_ == mirroring.MirroringEndpointGroup.Type.DIRECT + assert response.description == "description_value" + + +def test_get_mirroring_endpoint_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.GetMirroringEndpointGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_mirroring_endpoint_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.GetMirroringEndpointGroupRequest( + name="name_value", + ) + + +def test_get_mirroring_endpoint_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_mirroring_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_mirroring_endpoint_group + ] = mock_rpc + request = {} + client.get_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. 
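+        # The first call above went through the cached wrapper exactly once;
+        # the repeat call below should reuse it, so wrapper_fn is not invoked
+        # again and only the mock RPC's call count increases.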
+ assert mock_rpc.call_count == 1 + + client.get_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_mirroring_endpoint_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_mirroring_endpoint_group + ] = mock_rpc + + request = {} + await client.get_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_async( + transport: str = "grpc_asyncio", + request_type=mirroring.GetMirroringEndpointGroupRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringEndpointGroup( + name="name_value", + mirroring_deployment_group="mirroring_deployment_group_value", + state=mirroring.MirroringEndpointGroup.State.ACTIVE, + reconciling=True, + type_=mirroring.MirroringEndpointGroup.Type.DIRECT, + description="description_value", + ) + ) + response = await client.get_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.GetMirroringEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
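+ # (Awaiting the FakeUnaryUnaryCall designated above resolves to the wrapped
+ # message, so the async surface is checked with the same field assertions.)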
+ assert isinstance(response, mirroring.MirroringEndpointGroup) + assert response.name == "name_value" + assert response.mirroring_deployment_group == "mirroring_deployment_group_value" + assert response.state == mirroring.MirroringEndpointGroup.State.ACTIVE + assert response.reconciling is True + assert response.type_ == mirroring.MirroringEndpointGroup.Type.DIRECT + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_async_from_dict(): + await test_get_mirroring_endpoint_group_async(request_type=dict) + + +def test_get_mirroring_endpoint_group_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.GetMirroringEndpointGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = mirroring.MirroringEndpointGroup() + client.get_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.GetMirroringEndpointGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringEndpointGroup() + ) + await client.get_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_mirroring_endpoint_group_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.MirroringEndpointGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_mirroring_endpoint_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_mirroring_endpoint_group_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_mirroring_endpoint_group( + mirroring.GetMirroringEndpointGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.MirroringEndpointGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringEndpointGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_mirroring_endpoint_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_mirroring_endpoint_group( + mirroring.GetMirroringEndpointGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.CreateMirroringEndpointGroupRequest, + dict, + ], +) +def test_create_mirroring_endpoint_group(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.CreateMirroringEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_mirroring_endpoint_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
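+ # (Per AIP-4235, string fields annotated for auto-population, typically a
+ # request_id, are filled in with a UUID4 when left unset; the explicitly set
+ # fields below must pass through untouched.)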
+ client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.CreateMirroringEndpointGroupRequest( + parent="parent_value", + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_mirroring_endpoint_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.CreateMirroringEndpointGroupRequest( + parent="parent_value", + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + +def test_create_mirroring_endpoint_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_mirroring_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_mirroring_endpoint_group + ] = mock_rpc + request = {} + client.create_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_mirroring_endpoint_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_mirroring_endpoint_group + ] = mock_rpc + + request = {} + await client.create_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_async( + transport: str = "grpc_asyncio", + request_type=mirroring.CreateMirroringEndpointGroupRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.CreateMirroringEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_async_from_dict(): + await test_create_mirroring_endpoint_group_async(request_type=dict) + + +def test_create_mirroring_endpoint_group_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
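+ # (Routing information travels in the x-goog-request-params metadata entry;
+ # the assertion at the end of this test checks for parent=parent_value.)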
+ request = mirroring.CreateMirroringEndpointGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.CreateMirroringEndpointGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_mirroring_endpoint_group_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_mirroring_endpoint_group( + parent="parent_value", + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].mirroring_endpoint_group + mock_val = mirroring.MirroringEndpointGroup(name="name_value") + assert arg == mock_val + arg = args[0].mirroring_endpoint_group_id + mock_val = "mirroring_endpoint_group_id_value" + assert arg == mock_val + + +def test_create_mirroring_endpoint_group_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_mirroring_endpoint_group( + mirroring.CreateMirroringEndpointGroupRequest(), + parent="parent_value", + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_mirroring_endpoint_group( + parent="parent_value", + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].mirroring_endpoint_group + mock_val = mirroring.MirroringEndpointGroup(name="name_value") + assert arg == mock_val + arg = args[0].mirroring_endpoint_group_id + mock_val = "mirroring_endpoint_group_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_mirroring_endpoint_group( + mirroring.CreateMirroringEndpointGroupRequest(), + parent="parent_value", + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.UpdateMirroringEndpointGroupRequest, + dict, + ], +) +def test_update_mirroring_endpoint_group(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.UpdateMirroringEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
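+ # (Long-running methods return an operation future wrapping the
+ # operations_pb2.Operation designated above rather than the raw proto.)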
+ assert isinstance(response, future.Future) + + +def test_update_mirroring_endpoint_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.UpdateMirroringEndpointGroupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_mirroring_endpoint_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.UpdateMirroringEndpointGroupRequest() + + +def test_update_mirroring_endpoint_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_mirroring_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_mirroring_endpoint_group + ] = mock_rpc + request = {} + client.update_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_mirroring_endpoint_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_mirroring_endpoint_group + ] = mock_rpc + + request = {} + await client.update_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_async( + transport: str = "grpc_asyncio", + request_type=mirroring.UpdateMirroringEndpointGroupRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.UpdateMirroringEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_async_from_dict(): + await test_update_mirroring_endpoint_group_async(request_type=dict) + + +def test_update_mirroring_endpoint_group_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
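+ # (For Update RPCs the routing key is the nested resource name, so the header
+ # asserted below is mirroring_endpoint_group.name=name_value.)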
+ request = mirroring.UpdateMirroringEndpointGroupRequest() + + request.mirroring_endpoint_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "mirroring_endpoint_group.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.UpdateMirroringEndpointGroupRequest() + + request.mirroring_endpoint_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "mirroring_endpoint_group.name=name_value", + ) in kw["metadata"] + + +def test_update_mirroring_endpoint_group_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_mirroring_endpoint_group( + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].mirroring_endpoint_group + mock_val = mirroring.MirroringEndpointGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_mirroring_endpoint_group_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_mirroring_endpoint_group( + mirroring.UpdateMirroringEndpointGroupRequest(), + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_mirroring_endpoint_group( + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].mirroring_endpoint_group + mock_val = mirroring.MirroringEndpointGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_mirroring_endpoint_group( + mirroring.UpdateMirroringEndpointGroupRequest(), + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.DeleteMirroringEndpointGroupRequest, + dict, + ], +) +def test_delete_mirroring_endpoint_group(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.DeleteMirroringEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_mirroring_endpoint_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.DeleteMirroringEndpointGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_mirroring_endpoint_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.DeleteMirroringEndpointGroupRequest( + name="name_value", + ) + + +def test_delete_mirroring_endpoint_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_mirroring_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_mirroring_endpoint_group + ] = mock_rpc + request = {} + client.delete_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_mirroring_endpoint_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_mirroring_endpoint_group + ] = mock_rpc + + request = {} + await client.delete_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_async( + transport: str = "grpc_asyncio", + request_type=mirroring.DeleteMirroringEndpointGroupRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.DeleteMirroringEndpointGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_async_from_dict(): + await test_delete_mirroring_endpoint_group_async(request_type=dict) + + +def test_delete_mirroring_endpoint_group_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = mirroring.DeleteMirroringEndpointGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.DeleteMirroringEndpointGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_mirroring_endpoint_group_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_mirroring_endpoint_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_mirroring_endpoint_group_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_mirroring_endpoint_group( + mirroring.DeleteMirroringEndpointGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
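+ # (The plain Operation assigned first is immediately superseded by the
+ # awaitable FakeUnaryUnaryCall below; only the latter is consumed by the
+ # async client.)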
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_mirroring_endpoint_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_mirroring_endpoint_group( + mirroring.DeleteMirroringEndpointGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.ListMirroringEndpointGroupAssociationsRequest, + dict, + ], +) +def test_list_mirroring_endpoint_group_associations( + request_type, transport: str = "grpc" +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_mirroring_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.ListMirroringEndpointGroupAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMirroringEndpointGroupAssociationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_mirroring_endpoint_group_associations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.ListMirroringEndpointGroupAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_mirroring_endpoint_group_associations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.ListMirroringEndpointGroupAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_mirroring_endpoint_group_associations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_mirroring_endpoint_group_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_mirroring_endpoint_group_associations + ] = mock_rpc + request = {} + client.list_mirroring_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_mirroring_endpoint_group_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_group_associations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_mirroring_endpoint_group_associations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_mirroring_endpoint_group_associations + ] = mock_rpc + + request = {} + await client.list_mirroring_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_mirroring_endpoint_group_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_group_associations_async( + transport: str = "grpc_asyncio", + request_type=mirroring.ListMirroringEndpointGroupAssociationsRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringEndpointGroupAssociationsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_mirroring_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.ListMirroringEndpointGroupAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMirroringEndpointGroupAssociationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_group_associations_async_from_dict(): + await test_list_mirroring_endpoint_group_associations_async(request_type=dict) + + +def test_list_mirroring_endpoint_group_associations_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.ListMirroringEndpointGroupAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + call.return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse() + client.list_mirroring_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_group_associations_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.ListMirroringEndpointGroupAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringEndpointGroupAssociationsResponse() + ) + await client.list_mirroring_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_mirroring_endpoint_group_associations_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_mirroring_endpoint_group_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_mirroring_endpoint_group_associations_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_mirroring_endpoint_group_associations( + mirroring.ListMirroringEndpointGroupAssociationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_group_associations_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringEndpointGroupAssociationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_mirroring_endpoint_group_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_group_associations_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_mirroring_endpoint_group_associations( + mirroring.ListMirroringEndpointGroupAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_mirroring_endpoint_group_associations_pager(transport_name: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + # Set the response to a series of pages. 
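+ # The pager re-issues the RPC with each next_page_token until it receives an
+ # empty token; the trailing RuntimeError makes any unexpected extra call fail
+ # loudly. A purely illustrative usage sketch against a real service
+ # (hypothetical parent resource, application default credentials assumed):
+ #
+ #     client = MirroringClient()
+ #     for assoc in client.list_mirroring_endpoint_group_associations(
+ #         parent="projects/my-project/locations/global",  # hypothetical
+ #     ):
+ #         print(assoc.name)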
+ call.side_effect = ( + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[], + next_page_token="def", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_mirroring_endpoint_group_associations( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, mirroring.MirroringEndpointGroupAssociation) for i in results + ) + + +def test_list_mirroring_endpoint_group_associations_pages(transport_name: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[], + next_page_token="def", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + ], + ), + RuntimeError, + ) + pages = list( + client.list_mirroring_endpoint_group_associations(request={}).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_group_associations_async_pager(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[], + next_page_token="def", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_mirroring_endpoint_group_associations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, mirroring.MirroringEndpointGroupAssociation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_group_associations_async_pages(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[], + next_page_token="def", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_mirroring_endpoint_group_associations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.GetMirroringEndpointGroupAssociationRequest, + dict, + ], +) +def test_get_mirroring_endpoint_group_association( + request_type, transport: str = "grpc" +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.MirroringEndpointGroupAssociation( + name="name_value", + mirroring_endpoint_group="mirroring_endpoint_group_value", + network="network_value", + state=mirroring.MirroringEndpointGroupAssociation.State.ACTIVE, + reconciling=True, + ) + response = client.get_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.GetMirroringEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, mirroring.MirroringEndpointGroupAssociation) + assert response.name == "name_value" + assert response.mirroring_endpoint_group == "mirroring_endpoint_group_value" + assert response.network == "network_value" + assert response.state == mirroring.MirroringEndpointGroupAssociation.State.ACTIVE + assert response.reconciling is True + + +def test_get_mirroring_endpoint_group_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.GetMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_mirroring_endpoint_group_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.GetMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + +def test_get_mirroring_endpoint_group_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_mirroring_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_mirroring_endpoint_group_association + ] = mock_rpc + request = {} + client.get_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_mirroring_endpoint_group_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_mirroring_endpoint_group_association + ] = mock_rpc + + request = {} + await client.get_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_association_async( + transport: str = "grpc_asyncio", + request_type=mirroring.GetMirroringEndpointGroupAssociationRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringEndpointGroupAssociation( + name="name_value", + mirroring_endpoint_group="mirroring_endpoint_group_value", + network="network_value", + state=mirroring.MirroringEndpointGroupAssociation.State.ACTIVE, + reconciling=True, + ) + ) + response = await client.get_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.GetMirroringEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, mirroring.MirroringEndpointGroupAssociation) + assert response.name == "name_value" + assert response.mirroring_endpoint_group == "mirroring_endpoint_group_value" + assert response.network == "network_value" + assert response.state == mirroring.MirroringEndpointGroupAssociation.State.ACTIVE + assert response.reconciling is True + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_association_async_from_dict(): + await test_get_mirroring_endpoint_group_association_async(request_type=dict) + + +def test_get_mirroring_endpoint_group_association_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.GetMirroringEndpointGroupAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = mirroring.MirroringEndpointGroupAssociation() + client.get_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_association_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.GetMirroringEndpointGroupAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringEndpointGroupAssociation() + ) + await client.get_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_mirroring_endpoint_group_association_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.MirroringEndpointGroupAssociation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_mirroring_endpoint_group_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_mirroring_endpoint_group_association_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_mirroring_endpoint_group_association( + mirroring.GetMirroringEndpointGroupAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_association_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.MirroringEndpointGroupAssociation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringEndpointGroupAssociation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_mirroring_endpoint_group_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_association_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_mirroring_endpoint_group_association( + mirroring.GetMirroringEndpointGroupAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.CreateMirroringEndpointGroupAssociationRequest, + dict, + ], +) +def test_create_mirroring_endpoint_group_association( + request_type, transport: str = "grpc" +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.CreateMirroringEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_create_mirroring_endpoint_group_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.CreateMirroringEndpointGroupAssociationRequest( + parent="parent_value", + mirroring_endpoint_group_association_id="mirroring_endpoint_group_association_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_mirroring_endpoint_group_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.CreateMirroringEndpointGroupAssociationRequest( + parent="parent_value", + mirroring_endpoint_group_association_id="mirroring_endpoint_group_association_id_value", + ) + + +def test_create_mirroring_endpoint_group_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_mirroring_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_mirroring_endpoint_group_association + ] = mock_rpc + request = {} + client.create_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_mirroring_endpoint_group_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_mirroring_endpoint_group_association + ] = mock_rpc + + request = {} + await client.create_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_association_async( + transport: str = "grpc_asyncio", + request_type=mirroring.CreateMirroringEndpointGroupAssociationRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.CreateMirroringEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_association_async_from_dict(): + await test_create_mirroring_endpoint_group_association_async(request_type=dict) + + +def test_create_mirroring_endpoint_group_association_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.CreateMirroringEndpointGroupAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_association_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.CreateMirroringEndpointGroupAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_mirroring_endpoint_group_association_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_mirroring_endpoint_group_association( + parent="parent_value", + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + mirroring_endpoint_group_association_id="mirroring_endpoint_group_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].mirroring_endpoint_group_association + mock_val = mirroring.MirroringEndpointGroupAssociation(name="name_value") + assert arg == mock_val + arg = args[0].mirroring_endpoint_group_association_id + mock_val = "mirroring_endpoint_group_association_id_value" + assert arg == mock_val + + +def test_create_mirroring_endpoint_group_association_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_mirroring_endpoint_group_association( + mirroring.CreateMirroringEndpointGroupAssociationRequest(), + parent="parent_value", + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + mirroring_endpoint_group_association_id="mirroring_endpoint_group_association_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_association_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_mirroring_endpoint_group_association( + parent="parent_value", + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + mirroring_endpoint_group_association_id="mirroring_endpoint_group_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].mirroring_endpoint_group_association + mock_val = mirroring.MirroringEndpointGroupAssociation(name="name_value") + assert arg == mock_val + arg = args[0].mirroring_endpoint_group_association_id + mock_val = "mirroring_endpoint_group_association_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_association_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_mirroring_endpoint_group_association( + mirroring.CreateMirroringEndpointGroupAssociationRequest(), + parent="parent_value", + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + mirroring_endpoint_group_association_id="mirroring_endpoint_group_association_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.UpdateMirroringEndpointGroupAssociationRequest, + dict, + ], +) +def test_update_mirroring_endpoint_group_association( + request_type, transport: str = "grpc" +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.UpdateMirroringEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_mirroring_endpoint_group_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.UpdateMirroringEndpointGroupAssociationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_mirroring_endpoint_group_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.UpdateMirroringEndpointGroupAssociationRequest() + + +def test_update_mirroring_endpoint_group_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_mirroring_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_mirroring_endpoint_group_association + ] = mock_rpc + request = {} + client.update_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_mirroring_endpoint_group_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_mirroring_endpoint_group_association + ] = mock_rpc + + request = {} + await client.update_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_association_async( + transport: str = "grpc_asyncio", + request_type=mirroring.UpdateMirroringEndpointGroupAssociationRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.UpdateMirroringEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_association_async_from_dict(): + await test_update_mirroring_endpoint_group_association_async(request_type=dict) + + +def test_update_mirroring_endpoint_group_association_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.UpdateMirroringEndpointGroupAssociationRequest() + + request.mirroring_endpoint_group_association.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "mirroring_endpoint_group_association.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_association_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.UpdateMirroringEndpointGroupAssociationRequest() + + request.mirroring_endpoint_group_association.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "mirroring_endpoint_group_association.name=name_value", + ) in kw["metadata"] + + +def test_update_mirroring_endpoint_group_association_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_mirroring_endpoint_group_association( + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].mirroring_endpoint_group_association + mock_val = mirroring.MirroringEndpointGroupAssociation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_mirroring_endpoint_group_association_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_mirroring_endpoint_group_association( + mirroring.UpdateMirroringEndpointGroupAssociationRequest(), + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_association_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_mirroring_endpoint_group_association( + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].mirroring_endpoint_group_association + mock_val = mirroring.MirroringEndpointGroupAssociation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_association_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_mirroring_endpoint_group_association( + mirroring.UpdateMirroringEndpointGroupAssociationRequest(), + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.DeleteMirroringEndpointGroupAssociationRequest, + dict, + ], +) +def test_delete_mirroring_endpoint_group_association( + request_type, transport: str = "grpc" +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.DeleteMirroringEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_mirroring_endpoint_group_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.DeleteMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_mirroring_endpoint_group_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.DeleteMirroringEndpointGroupAssociationRequest( + name="name_value", + ) + + +def test_delete_mirroring_endpoint_group_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_mirroring_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_mirroring_endpoint_group_association + ] = mock_rpc + request = {} + client.delete_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_mirroring_endpoint_group_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_mirroring_endpoint_group_association + ] = mock_rpc + + request = {} + await client.delete_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_association_async( + transport: str = "grpc_asyncio", + request_type=mirroring.DeleteMirroringEndpointGroupAssociationRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.DeleteMirroringEndpointGroupAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_association_async_from_dict(): + await test_delete_mirroring_endpoint_group_association_async(request_type=dict) + + +def test_delete_mirroring_endpoint_group_association_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.DeleteMirroringEndpointGroupAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_association_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.DeleteMirroringEndpointGroupAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_mirroring_endpoint_group_association_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_mirroring_endpoint_group_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_mirroring_endpoint_group_association_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_mirroring_endpoint_group_association( + mirroring.DeleteMirroringEndpointGroupAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_association_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_mirroring_endpoint_group_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_association_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_mirroring_endpoint_group_association( + mirroring.DeleteMirroringEndpointGroupAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.ListMirroringDeploymentGroupsRequest, + dict, + ], +) +def test_list_mirroring_deployment_groups(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.ListMirroringDeploymentGroupsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_mirroring_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.ListMirroringDeploymentGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMirroringDeploymentGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_mirroring_deployment_groups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.ListMirroringDeploymentGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_mirroring_deployment_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.ListMirroringDeploymentGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_mirroring_deployment_groups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_mirroring_deployment_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_mirroring_deployment_groups + ] = mock_rpc + request = {} + client.list_mirroring_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_mirroring_deployment_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_mirroring_deployment_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_mirroring_deployment_groups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_mirroring_deployment_groups + ] = mock_rpc + + request = {} + await client.list_mirroring_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_mirroring_deployment_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_mirroring_deployment_groups_async( + transport: str = "grpc_asyncio", + request_type=mirroring.ListMirroringDeploymentGroupsRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringDeploymentGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_mirroring_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.ListMirroringDeploymentGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMirroringDeploymentGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_mirroring_deployment_groups_async_from_dict(): + await test_list_mirroring_deployment_groups_async(request_type=dict) + + +def test_list_mirroring_deployment_groups_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.ListMirroringDeploymentGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + call.return_value = mirroring.ListMirroringDeploymentGroupsResponse() + client.list_mirroring_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_mirroring_deployment_groups_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.ListMirroringDeploymentGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringDeploymentGroupsResponse() + ) + await client.list_mirroring_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_mirroring_deployment_groups_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mirroring.ListMirroringDeploymentGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_mirroring_deployment_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_mirroring_deployment_groups_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_mirroring_deployment_groups( + mirroring.ListMirroringDeploymentGroupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_mirroring_deployment_groups_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.ListMirroringDeploymentGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringDeploymentGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_mirroring_deployment_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_mirroring_deployment_groups_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_mirroring_deployment_groups( + mirroring.ListMirroringDeploymentGroupsRequest(), + parent="parent_value", + ) + + +def test_list_mirroring_deployment_groups_pager(transport_name: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[], + next_page_token="def", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_mirroring_deployment_groups( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, mirroring.MirroringDeploymentGroup) for i in results) + + +def test_list_mirroring_deployment_groups_pages(transport_name: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[], + next_page_token="def", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_mirroring_deployment_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_mirroring_deployment_groups_async_pager(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[], + next_page_token="def", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_mirroring_deployment_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, mirroring.MirroringDeploymentGroup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_mirroring_deployment_groups_async_pages(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[], + next_page_token="def", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_mirroring_deployment_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.GetMirroringDeploymentGroupRequest, + dict, + ], +) +def test_get_mirroring_deployment_group(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mirroring.MirroringDeploymentGroup( + name="name_value", + network="network_value", + state=mirroring.MirroringDeploymentGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + response = client.get_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.GetMirroringDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, mirroring.MirroringDeploymentGroup) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.state == mirroring.MirroringDeploymentGroup.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +def test_get_mirroring_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.GetMirroringDeploymentGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_mirroring_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.GetMirroringDeploymentGroupRequest( + name="name_value", + ) + + +def test_get_mirroring_deployment_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_mirroring_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_mirroring_deployment_group + ] = mock_rpc + request = {} + client.get_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_mirroring_deployment_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_mirroring_deployment_group + ] = mock_rpc + + request = {} + await client.get_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_group_async( + transport: str = "grpc_asyncio", + request_type=mirroring.GetMirroringDeploymentGroupRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringDeploymentGroup( + name="name_value", + network="network_value", + state=mirroring.MirroringDeploymentGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + ) + response = await client.get_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.GetMirroringDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, mirroring.MirroringDeploymentGroup) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.state == mirroring.MirroringDeploymentGroup.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_group_async_from_dict(): + await test_get_mirroring_deployment_group_async(request_type=dict) + + +def test_get_mirroring_deployment_group_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.GetMirroringDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = mirroring.MirroringDeploymentGroup() + client.get_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_group_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.GetMirroringDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringDeploymentGroup() + ) + await client.get_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_mirroring_deployment_group_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.MirroringDeploymentGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_mirroring_deployment_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_mirroring_deployment_group_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_mirroring_deployment_group( + mirroring.GetMirroringDeploymentGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_group_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.MirroringDeploymentGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringDeploymentGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_mirroring_deployment_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_group_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_mirroring_deployment_group( + mirroring.GetMirroringDeploymentGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.CreateMirroringDeploymentGroupRequest, + dict, + ], +) +def test_create_mirroring_deployment_group(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.CreateMirroringDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_mirroring_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.CreateMirroringDeploymentGroupRequest( + parent="parent_value", + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_mirroring_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.CreateMirroringDeploymentGroupRequest( + parent="parent_value", + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + ) + + +def test_create_mirroring_deployment_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_mirroring_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_mirroring_deployment_group + ] = mock_rpc + request = {} + client.create_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_mirroring_deployment_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_mirroring_deployment_group + ] = mock_rpc + + request = {} + await client.create_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_group_async( + transport: str = "grpc_asyncio", + request_type=mirroring.CreateMirroringDeploymentGroupRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.CreateMirroringDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_group_async_from_dict(): + await test_create_mirroring_deployment_group_async(request_type=dict) + + +def test_create_mirroring_deployment_group_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = mirroring.CreateMirroringDeploymentGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_group_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.CreateMirroringDeploymentGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_mirroring_deployment_group_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_mirroring_deployment_group( + parent="parent_value", + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].mirroring_deployment_group + mock_val = mirroring.MirroringDeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].mirroring_deployment_group_id + mock_val = "mirroring_deployment_group_id_value" + assert arg == mock_val + + +def test_create_mirroring_deployment_group_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_mirroring_deployment_group( + mirroring.CreateMirroringDeploymentGroupRequest(), + parent="parent_value", + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_group_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_mirroring_deployment_group( + parent="parent_value", + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].mirroring_deployment_group + mock_val = mirroring.MirroringDeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].mirroring_deployment_group_id + mock_val = "mirroring_deployment_group_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_group_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_mirroring_deployment_group( + mirroring.CreateMirroringDeploymentGroupRequest(), + parent="parent_value", + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.UpdateMirroringDeploymentGroupRequest, + dict, + ], +) +def test_update_mirroring_deployment_group(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.UpdateMirroringDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_update_mirroring_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.UpdateMirroringDeploymentGroupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_mirroring_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.UpdateMirroringDeploymentGroupRequest() + + +def test_update_mirroring_deployment_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_mirroring_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_mirroring_deployment_group + ] = mock_rpc + request = {} + client.update_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_mirroring_deployment_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_mirroring_deployment_group + ] = mock_rpc + + request = {} + await client.update_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_group_async( + transport: str = "grpc_asyncio", + request_type=mirroring.UpdateMirroringDeploymentGroupRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.UpdateMirroringDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_group_async_from_dict(): + await test_update_mirroring_deployment_group_async(request_type=dict) + + +def test_update_mirroring_deployment_group_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = mirroring.UpdateMirroringDeploymentGroupRequest() + + request.mirroring_deployment_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "mirroring_deployment_group.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_group_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.UpdateMirroringDeploymentGroupRequest() + + request.mirroring_deployment_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "mirroring_deployment_group.name=name_value", + ) in kw["metadata"] + + +def test_update_mirroring_deployment_group_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_mirroring_deployment_group( + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].mirroring_deployment_group + mock_val = mirroring.MirroringDeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_mirroring_deployment_group_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_mirroring_deployment_group( + mirroring.UpdateMirroringDeploymentGroupRequest(), + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_group_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_mirroring_deployment_group( + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].mirroring_deployment_group + mock_val = mirroring.MirroringDeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_group_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_mirroring_deployment_group( + mirroring.UpdateMirroringDeploymentGroupRequest(), + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.DeleteMirroringDeploymentGroupRequest, + dict, + ], +) +def test_delete_mirroring_deployment_group(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.DeleteMirroringDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_mirroring_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.DeleteMirroringDeploymentGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_mirroring_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.DeleteMirroringDeploymentGroupRequest( + name="name_value", + ) + + +def test_delete_mirroring_deployment_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_mirroring_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_mirroring_deployment_group + ] = mock_rpc + request = {} + client.delete_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_mirroring_deployment_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_mirroring_deployment_group + ] = mock_rpc + + request = {} + await client.delete_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_group_async( + transport: str = "grpc_asyncio", + request_type=mirroring.DeleteMirroringDeploymentGroupRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.DeleteMirroringDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_group_async_from_dict(): + await test_delete_mirroring_deployment_group_async(request_type=dict) + + +def test_delete_mirroring_deployment_group_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = mirroring.DeleteMirroringDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_group_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.DeleteMirroringDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_mirroring_deployment_group_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_mirroring_deployment_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_mirroring_deployment_group_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_mirroring_deployment_group( + mirroring.DeleteMirroringDeploymentGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_group_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_mirroring_deployment_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_group_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_mirroring_deployment_group( + mirroring.DeleteMirroringDeploymentGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.ListMirroringDeploymentsRequest, + dict, + ], +) +def test_list_mirroring_deployments(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.ListMirroringDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_mirroring_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.ListMirroringDeploymentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMirroringDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_mirroring_deployments_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.ListMirroringDeploymentsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_mirroring_deployments(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.ListMirroringDeploymentsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_mirroring_deployments_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_mirroring_deployments + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_mirroring_deployments + ] = mock_rpc + request = {} + client.list_mirroring_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_mirroring_deployments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_mirroring_deployments_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_mirroring_deployments + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_mirroring_deployments + ] = mock_rpc + + request = {} + await client.list_mirroring_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_mirroring_deployments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_mirroring_deployments_async( + transport: str = "grpc_asyncio", + request_type=mirroring.ListMirroringDeploymentsRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_mirroring_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.ListMirroringDeploymentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMirroringDeploymentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_mirroring_deployments_async_from_dict(): + await test_list_mirroring_deployments_async(request_type=dict) + + +def test_list_mirroring_deployments_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.ListMirroringDeploymentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + call.return_value = mirroring.ListMirroringDeploymentsResponse() + client.list_mirroring_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_mirroring_deployments_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.ListMirroringDeploymentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringDeploymentsResponse() + ) + await client.list_mirroring_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_mirroring_deployments_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.ListMirroringDeploymentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_mirroring_deployments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_mirroring_deployments_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_mirroring_deployments( + mirroring.ListMirroringDeploymentsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_mirroring_deployments_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.ListMirroringDeploymentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringDeploymentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_mirroring_deployments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_mirroring_deployments_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_mirroring_deployments( + mirroring.ListMirroringDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_mirroring_deployments_pager(transport_name: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[], + next_page_token="def", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_mirroring_deployments( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, mirroring.MirroringDeployment) for i in results) + + +def test_list_mirroring_deployments_pages(transport_name: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[], + next_page_token="def", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + ], + ), + RuntimeError, + ) + pages = list(client.list_mirroring_deployments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_mirroring_deployments_async_pager(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[], + next_page_token="def", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_mirroring_deployments( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, mirroring.MirroringDeployment) for i in responses) + + +@pytest.mark.asyncio +async def test_list_mirroring_deployments_async_pages(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[], + next_page_token="def", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_mirroring_deployments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.GetMirroringDeploymentRequest, + dict, + ], +) +def test_get_mirroring_deployment(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mirroring.MirroringDeployment( + name="name_value", + forwarding_rule="forwarding_rule_value", + mirroring_deployment_group="mirroring_deployment_group_value", + state=mirroring.MirroringDeployment.State.ACTIVE, + reconciling=True, + description="description_value", + ) + response = client.get_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.GetMirroringDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, mirroring.MirroringDeployment) + assert response.name == "name_value" + assert response.forwarding_rule == "forwarding_rule_value" + assert response.mirroring_deployment_group == "mirroring_deployment_group_value" + assert response.state == mirroring.MirroringDeployment.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +def test_get_mirroring_deployment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.GetMirroringDeploymentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_mirroring_deployment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.GetMirroringDeploymentRequest( + name="name_value", + ) + + +def test_get_mirroring_deployment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_mirroring_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_mirroring_deployment + ] = mock_rpc + request = {} + client.get_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_mirroring_deployment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_mirroring_deployment + ] = mock_rpc + + request = {} + await client.get_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_async( + transport: str = "grpc_asyncio", + request_type=mirroring.GetMirroringDeploymentRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringDeployment( + name="name_value", + forwarding_rule="forwarding_rule_value", + mirroring_deployment_group="mirroring_deployment_group_value", + state=mirroring.MirroringDeployment.State.ACTIVE, + reconciling=True, + description="description_value", + ) + ) + response = await client.get_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.GetMirroringDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, mirroring.MirroringDeployment) + assert response.name == "name_value" + assert response.forwarding_rule == "forwarding_rule_value" + assert response.mirroring_deployment_group == "mirroring_deployment_group_value" + assert response.state == mirroring.MirroringDeployment.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_async_from_dict(): + await test_get_mirroring_deployment_async(request_type=dict) + + +def test_get_mirroring_deployment_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.GetMirroringDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment), "__call__" + ) as call: + call.return_value = mirroring.MirroringDeployment() + client.get_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.GetMirroringDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringDeployment() + ) + await client.get_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_mirroring_deployment_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.MirroringDeployment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_mirroring_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_mirroring_deployment_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_mirroring_deployment( + mirroring.GetMirroringDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mirroring.MirroringDeployment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringDeployment() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_mirroring_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_mirroring_deployment_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_mirroring_deployment( + mirroring.GetMirroringDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.CreateMirroringDeploymentRequest, + dict, + ], +) +def test_create_mirroring_deployment(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.CreateMirroringDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_mirroring_deployment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.CreateMirroringDeploymentRequest( + parent="parent_value", + mirroring_deployment_id="mirroring_deployment_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_mirroring_deployment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.CreateMirroringDeploymentRequest( + parent="parent_value", + mirroring_deployment_id="mirroring_deployment_id_value", + ) + + +def test_create_mirroring_deployment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_mirroring_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_mirroring_deployment + ] = mock_rpc + request = {} + client.create_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_mirroring_deployment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_mirroring_deployment + ] = mock_rpc + + request = {} + await client.create_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_async( + transport: str = "grpc_asyncio", + request_type=mirroring.CreateMirroringDeploymentRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.CreateMirroringDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_async_from_dict(): + await test_create_mirroring_deployment_async(request_type=dict) + + +def test_create_mirroring_deployment_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.CreateMirroringDeploymentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.CreateMirroringDeploymentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_mirroring_deployment_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_mirroring_deployment( + parent="parent_value", + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + mirroring_deployment_id="mirroring_deployment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].mirroring_deployment + mock_val = mirroring.MirroringDeployment(name="name_value") + assert arg == mock_val + arg = args[0].mirroring_deployment_id + mock_val = "mirroring_deployment_id_value" + assert arg == mock_val + + +def test_create_mirroring_deployment_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_mirroring_deployment( + mirroring.CreateMirroringDeploymentRequest(), + parent="parent_value", + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + mirroring_deployment_id="mirroring_deployment_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_mirroring_deployment( + parent="parent_value", + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + mirroring_deployment_id="mirroring_deployment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].mirroring_deployment + mock_val = mirroring.MirroringDeployment(name="name_value") + assert arg == mock_val + arg = args[0].mirroring_deployment_id + mock_val = "mirroring_deployment_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_mirroring_deployment_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_mirroring_deployment( + mirroring.CreateMirroringDeploymentRequest(), + parent="parent_value", + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + mirroring_deployment_id="mirroring_deployment_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.UpdateMirroringDeploymentRequest, + dict, + ], +) +def test_update_mirroring_deployment(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.UpdateMirroringDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_mirroring_deployment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.UpdateMirroringDeploymentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_mirroring_deployment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.UpdateMirroringDeploymentRequest() + + +def test_update_mirroring_deployment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_mirroring_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_mirroring_deployment + ] = mock_rpc + request = {} + client.update_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_mirroring_deployment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_mirroring_deployment + ] = mock_rpc + + request = {} + await client.update_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_async( + transport: str = "grpc_asyncio", + request_type=mirroring.UpdateMirroringDeploymentRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.UpdateMirroringDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_async_from_dict(): + await test_update_mirroring_deployment_async(request_type=dict) + + +def test_update_mirroring_deployment_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.UpdateMirroringDeploymentRequest() + + request.mirroring_deployment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "mirroring_deployment.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.UpdateMirroringDeploymentRequest() + + request.mirroring_deployment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "mirroring_deployment.name=name_value", + ) in kw["metadata"] + + +def test_update_mirroring_deployment_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_mirroring_deployment( + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].mirroring_deployment + mock_val = mirroring.MirroringDeployment(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_mirroring_deployment_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_mirroring_deployment( + mirroring.UpdateMirroringDeploymentRequest(), + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_mirroring_deployment( + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].mirroring_deployment + mock_val = mirroring.MirroringDeployment(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_mirroring_deployment_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_mirroring_deployment( + mirroring.UpdateMirroringDeploymentRequest(), + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.DeleteMirroringDeploymentRequest, + dict, + ], +) +def test_delete_mirroring_deployment(request_type, transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = mirroring.DeleteMirroringDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_mirroring_deployment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = mirroring.DeleteMirroringDeploymentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_mirroring_deployment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == mirroring.DeleteMirroringDeploymentRequest( + name="name_value", + ) + + +def test_delete_mirroring_deployment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_mirroring_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_mirroring_deployment + ] = mock_rpc + request = {} + client.delete_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_mirroring_deployment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_mirroring_deployment + ] = mock_rpc + + request = {} + await client.delete_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_async( + transport: str = "grpc_asyncio", + request_type=mirroring.DeleteMirroringDeploymentRequest, +): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = mirroring.DeleteMirroringDeploymentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_async_from_dict(): + await test_delete_mirroring_deployment_async(request_type=dict) + + +def test_delete_mirroring_deployment_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
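+    # (The ``name`` value set here is what this test expects to surface in the
+    # ``x-goog-request-params`` metadata entry asserted at the end of the test.)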
+ request = mirroring.DeleteMirroringDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = mirroring.DeleteMirroringDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_mirroring_deployment_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_mirroring_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_mirroring_deployment_flattened_error(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_mirroring_deployment( + mirroring.DeleteMirroringDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_flattened_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
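+        # (The plain Operation assigned first is immediately replaced by the
+        # FakeUnaryUnaryCall below; only that awaitable fake is returned to the
+        # async client.)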
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_mirroring_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_flattened_error_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_mirroring_deployment( + mirroring.DeleteMirroringDeploymentRequest(), + name="name_value", + ) + + +def test_list_mirroring_endpoint_groups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_mirroring_endpoint_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_mirroring_endpoint_groups + ] = mock_rpc + + request = {} + client.list_mirroring_endpoint_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_mirroring_endpoint_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_mirroring_endpoint_groups_rest_required_fields( + request_type=mirroring.ListMirroringEndpointGroupsRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_mirroring_endpoint_groups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_mirroring_endpoint_groups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
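+    # (Any fields still reported as unset at this point should be limited to
+    # the optional query parameters listed below.)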
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = mirroring.ListMirroringEndpointGroupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.ListMirroringEndpointGroupsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_mirroring_endpoint_groups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_mirroring_endpoint_groups_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_mirroring_endpoint_groups._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_mirroring_endpoint_groups_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = mirroring.ListMirroringEndpointGroupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = mirroring.ListMirroringEndpointGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_mirroring_endpoint_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/mirroringEndpointGroups" + % client.transport._host, + args[1], + ) + + +def test_list_mirroring_endpoint_groups_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_mirroring_endpoint_groups( + mirroring.ListMirroringEndpointGroupsRequest(), + parent="parent_value", + ) + + +def test_list_mirroring_endpoint_groups_rest_pager(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[], + next_page_token="def", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringEndpointGroupsResponse( + mirroring_endpoint_groups=[ + mirroring.MirroringEndpointGroup(), + mirroring.MirroringEndpointGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + mirroring.ListMirroringEndpointGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_mirroring_endpoint_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, mirroring.MirroringEndpointGroup) for i in results) + + pages = list( + client.list_mirroring_endpoint_groups(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_mirroring_endpoint_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_mirroring_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_mirroring_endpoint_group + ] = mock_rpc + + request = {} + client.get_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_mirroring_endpoint_group_rest_required_fields( + request_type=mirroring.GetMirroringEndpointGroupRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_mirroring_endpoint_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_mirroring_endpoint_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringEndpointGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.MirroringEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_mirroring_endpoint_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_mirroring_endpoint_group_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_mirroring_endpoint_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_mirroring_endpoint_group_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringEndpointGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = mirroring.MirroringEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_mirroring_endpoint_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/mirroringEndpointGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_mirroring_endpoint_group_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_mirroring_endpoint_group( + mirroring.GetMirroringEndpointGroupRequest(), + name="name_value", + ) + + +def test_create_mirroring_endpoint_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_mirroring_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_mirroring_endpoint_group + ] = mock_rpc + + request = {} + client.create_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_mirroring_endpoint_group_rest_required_fields( + request_type=mirroring.CreateMirroringEndpointGroupRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["mirroring_endpoint_group_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "mirroringEndpointGroupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_mirroring_endpoint_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "mirroringEndpointGroupId" in jsonified_request + assert ( + jsonified_request["mirroringEndpointGroupId"] + == request_init["mirroring_endpoint_group_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["mirroringEndpointGroupId"] = "mirroring_endpoint_group_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_mirroring_endpoint_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "mirroring_endpoint_group_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "mirroringEndpointGroupId" in jsonified_request + assert ( + jsonified_request["mirroringEndpointGroupId"] + == "mirroring_endpoint_group_id_value" + ) + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_mirroring_endpoint_group(request) + + expected_params = [ + ( + "mirroringEndpointGroupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_mirroring_endpoint_group_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_mirroring_endpoint_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "mirroringEndpointGroupId", + "requestId", + ) + ) + & set( + ( + "parent", + "mirroringEndpointGroupId", + "mirroringEndpointGroup", + ) + ) + ) + + +def test_create_mirroring_endpoint_group_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_mirroring_endpoint_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/mirroringEndpointGroups" + % client.transport._host, + args[1], + ) + + +def test_create_mirroring_endpoint_group_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_mirroring_endpoint_group( + mirroring.CreateMirroringEndpointGroupRequest(), + parent="parent_value", + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + mirroring_endpoint_group_id="mirroring_endpoint_group_id_value", + ) + + +def test_update_mirroring_endpoint_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_mirroring_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_mirroring_endpoint_group + ] = mock_rpc + + request = {} + client.update_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_mirroring_endpoint_group_rest_required_fields( + request_type=mirroring.UpdateMirroringEndpointGroupRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_mirroring_endpoint_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_mirroring_endpoint_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_mirroring_endpoint_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_mirroring_endpoint_group_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_mirroring_endpoint_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("mirroringEndpointGroup",)) + ) + + +def test_update_mirroring_endpoint_group_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "mirroring_endpoint_group": { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroups/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_mirroring_endpoint_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{mirroring_endpoint_group.name=projects/*/locations/*/mirroringEndpointGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_mirroring_endpoint_group_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_mirroring_endpoint_group( + mirroring.UpdateMirroringEndpointGroupRequest(), + mirroring_endpoint_group=mirroring.MirroringEndpointGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_mirroring_endpoint_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_mirroring_endpoint_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_mirroring_endpoint_group + ] = mock_rpc + + request = {} + client.delete_mirroring_endpoint_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_mirroring_endpoint_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_mirroring_endpoint_group_rest_required_fields( + request_type=mirroring.DeleteMirroringEndpointGroupRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_mirroring_endpoint_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_mirroring_endpoint_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_mirroring_endpoint_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_mirroring_endpoint_group_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_mirroring_endpoint_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_mirroring_endpoint_group_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_mirroring_endpoint_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/mirroringEndpointGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_mirroring_endpoint_group_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_mirroring_endpoint_group( + mirroring.DeleteMirroringEndpointGroupRequest(), + name="name_value", + ) + + +def test_list_mirroring_endpoint_group_associations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_mirroring_endpoint_group_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_mirroring_endpoint_group_associations + ] = mock_rpc + + request = {} + client.list_mirroring_endpoint_group_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_mirroring_endpoint_group_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_mirroring_endpoint_group_associations_rest_required_fields( + request_type=mirroring.ListMirroringEndpointGroupAssociationsRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_mirroring_endpoint_group_associations._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_mirroring_endpoint_group_associations._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_mirroring_endpoint_group_associations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_mirroring_endpoint_group_associations_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_mirroring_endpoint_group_associations._get_unset_required_fields( + {} + ) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_mirroring_endpoint_group_associations_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_mirroring_endpoint_group_associations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/mirroringEndpointGroupAssociations" + % client.transport._host, + args[1], + ) + + +def test_list_mirroring_endpoint_group_associations_rest_flattened_error( + transport: str = "rest", +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_mirroring_endpoint_group_associations( + mirroring.ListMirroringEndpointGroupAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_mirroring_endpoint_group_associations_rest_pager(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[], + next_page_token="def", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringEndpointGroupAssociationsResponse( + mirroring_endpoint_group_associations=[ + mirroring.MirroringEndpointGroupAssociation(), + mirroring.MirroringEndpointGroupAssociation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + mirroring.ListMirroringEndpointGroupAssociationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_mirroring_endpoint_group_associations( + request=sample_request + ) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, mirroring.MirroringEndpointGroupAssociation) for i in results + ) + + pages = list( + client.list_mirroring_endpoint_group_associations( + request=sample_request + ).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_mirroring_endpoint_group_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_mirroring_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_mirroring_endpoint_group_association + ] = mock_rpc + + request = {} + client.get_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_mirroring_endpoint_group_association_rest_required_fields( + request_type=mirroring.GetMirroringEndpointGroupAssociationRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_mirroring_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_mirroring_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringEndpointGroupAssociation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.MirroringEndpointGroupAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_mirroring_endpoint_group_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_mirroring_endpoint_group_association_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.get_mirroring_endpoint_group_association._get_unset_required_fields( + {} + ) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_mirroring_endpoint_group_association_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringEndpointGroupAssociation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroupAssociations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = mirroring.MirroringEndpointGroupAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_mirroring_endpoint_group_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/mirroringEndpointGroupAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_mirroring_endpoint_group_association_rest_flattened_error( + transport: str = "rest", +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_mirroring_endpoint_group_association( + mirroring.GetMirroringEndpointGroupAssociationRequest(), + name="name_value", + ) + + +def test_create_mirroring_endpoint_group_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_mirroring_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_mirroring_endpoint_group_association + ] = mock_rpc + + request = {} + client.create_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_mirroring_endpoint_group_association_rest_required_fields( + request_type=mirroring.CreateMirroringEndpointGroupAssociationRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_mirroring_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_mirroring_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "mirroring_endpoint_group_association_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_mirroring_endpoint_group_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_mirroring_endpoint_group_association_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_mirroring_endpoint_group_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "mirroringEndpointGroupAssociationId", + "requestId", + ) + ) + & set( + ( + "parent", + "mirroringEndpointGroupAssociation", + ) + ) + ) + + +def test_create_mirroring_endpoint_group_association_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + mirroring_endpoint_group_association_id="mirroring_endpoint_group_association_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_mirroring_endpoint_group_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/mirroringEndpointGroupAssociations" + % client.transport._host, + args[1], + ) + + +def test_create_mirroring_endpoint_group_association_rest_flattened_error( + transport: str = "rest", +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_mirroring_endpoint_group_association( + mirroring.CreateMirroringEndpointGroupAssociationRequest(), + parent="parent_value", + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + mirroring_endpoint_group_association_id="mirroring_endpoint_group_association_id_value", + ) + + +def test_update_mirroring_endpoint_group_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_mirroring_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_mirroring_endpoint_group_association + ] = mock_rpc + + request = {} + client.update_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_mirroring_endpoint_group_association_rest_required_fields( + request_type=mirroring.UpdateMirroringEndpointGroupAssociationRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_mirroring_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_mirroring_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_mirroring_endpoint_group_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_mirroring_endpoint_group_association_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_mirroring_endpoint_group_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("mirroringEndpointGroupAssociation",)) + ) + + +def test_update_mirroring_endpoint_group_association_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "mirroring_endpoint_group_association": { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroupAssociations/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_mirroring_endpoint_group_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{mirroring_endpoint_group_association.name=projects/*/locations/*/mirroringEndpointGroupAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_mirroring_endpoint_group_association_rest_flattened_error( + transport: str = "rest", +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_mirroring_endpoint_group_association( + mirroring.UpdateMirroringEndpointGroupAssociationRequest(), + mirroring_endpoint_group_association=mirroring.MirroringEndpointGroupAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_mirroring_endpoint_group_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_mirroring_endpoint_group_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_mirroring_endpoint_group_association + ] = mock_rpc + + request = {} + client.delete_mirroring_endpoint_group_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_mirroring_endpoint_group_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_mirroring_endpoint_group_association_rest_required_fields( + request_type=mirroring.DeleteMirroringEndpointGroupAssociationRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_mirroring_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_mirroring_endpoint_group_association._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_mirroring_endpoint_group_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_mirroring_endpoint_group_association_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_mirroring_endpoint_group_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_mirroring_endpoint_group_association_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroupAssociations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_mirroring_endpoint_group_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/mirroringEndpointGroupAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_mirroring_endpoint_group_association_rest_flattened_error( + transport: str = "rest", +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_mirroring_endpoint_group_association( + mirroring.DeleteMirroringEndpointGroupAssociationRequest(), + name="name_value", + ) + + +def test_list_mirroring_deployment_groups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_mirroring_deployment_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_mirroring_deployment_groups + ] = mock_rpc + + request = {} + client.list_mirroring_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_mirroring_deployment_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_mirroring_deployment_groups_rest_required_fields( + request_type=mirroring.ListMirroringDeploymentGroupsRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_mirroring_deployment_groups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_mirroring_deployment_groups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = mirroring.ListMirroringDeploymentGroupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.ListMirroringDeploymentGroupsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_mirroring_deployment_groups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_mirroring_deployment_groups_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_mirroring_deployment_groups._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_mirroring_deployment_groups_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.ListMirroringDeploymentGroupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = mirroring.ListMirroringDeploymentGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_mirroring_deployment_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/mirroringDeploymentGroups" + % client.transport._host, + args[1], + ) + + +def test_list_mirroring_deployment_groups_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_mirroring_deployment_groups( + mirroring.ListMirroringDeploymentGroupsRequest(), + parent="parent_value", + ) + + +def test_list_mirroring_deployment_groups_rest_pager(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[], + next_page_token="def", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringDeploymentGroupsResponse( + mirroring_deployment_groups=[ + mirroring.MirroringDeploymentGroup(), + mirroring.MirroringDeploymentGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + mirroring.ListMirroringDeploymentGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_mirroring_deployment_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, mirroring.MirroringDeploymentGroup) for i in results) + + pages = list( + client.list_mirroring_deployment_groups(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_mirroring_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_mirroring_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_mirroring_deployment_group + ] = mock_rpc + + request = {} + client.get_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_mirroring_deployment_group_rest_required_fields( + request_type=mirroring.GetMirroringDeploymentGroupRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_mirroring_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_mirroring_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringDeploymentGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.MirroringDeploymentGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_mirroring_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_mirroring_deployment_group_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_mirroring_deployment_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_mirroring_deployment_group_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringDeploymentGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/mirroringDeploymentGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = mirroring.MirroringDeploymentGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_mirroring_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/mirroringDeploymentGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_mirroring_deployment_group_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_mirroring_deployment_group( + mirroring.GetMirroringDeploymentGroupRequest(), + name="name_value", + ) + + +def test_create_mirroring_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_mirroring_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_mirroring_deployment_group + ] = mock_rpc + + request = {} + client.create_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_mirroring_deployment_group_rest_required_fields( + request_type=mirroring.CreateMirroringDeploymentGroupRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["mirroring_deployment_group_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "mirroringDeploymentGroupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_mirroring_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "mirroringDeploymentGroupId" in jsonified_request + assert ( + jsonified_request["mirroringDeploymentGroupId"] + == request_init["mirroring_deployment_group_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "mirroringDeploymentGroupId" + ] = "mirroring_deployment_group_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_mirroring_deployment_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "mirroring_deployment_group_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "mirroringDeploymentGroupId" in jsonified_request + assert ( + jsonified_request["mirroringDeploymentGroupId"] + == "mirroring_deployment_group_id_value" + ) + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_mirroring_deployment_group(request) + + expected_params = [ + ( + "mirroringDeploymentGroupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_mirroring_deployment_group_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.create_mirroring_deployment_group._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "mirroringDeploymentGroupId", + "requestId", + ) + ) + & set( + ( + "parent", + "mirroringDeploymentGroupId", + "mirroringDeploymentGroup", + ) + ) + ) + + +def test_create_mirroring_deployment_group_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_mirroring_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/mirroringDeploymentGroups" + % client.transport._host, + args[1], + ) + + +def test_create_mirroring_deployment_group_rest_flattened_error( + transport: str = "rest", +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_mirroring_deployment_group( + mirroring.CreateMirroringDeploymentGroupRequest(), + parent="parent_value", + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + mirroring_deployment_group_id="mirroring_deployment_group_id_value", + ) + + +def test_update_mirroring_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_mirroring_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_mirroring_deployment_group + ] = mock_rpc + + request = {} + client.update_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_mirroring_deployment_group_rest_required_fields( + request_type=mirroring.UpdateMirroringDeploymentGroupRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_mirroring_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_mirroring_deployment_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_mirroring_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_mirroring_deployment_group_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.update_mirroring_deployment_group._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("mirroringDeploymentGroup",)) + ) + + +def test_update_mirroring_deployment_group_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "mirroring_deployment_group": { + "name": "projects/sample1/locations/sample2/mirroringDeploymentGroups/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_mirroring_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{mirroring_deployment_group.name=projects/*/locations/*/mirroringDeploymentGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_mirroring_deployment_group_rest_flattened_error( + transport: str = "rest", +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_mirroring_deployment_group( + mirroring.UpdateMirroringDeploymentGroupRequest(), + mirroring_deployment_group=mirroring.MirroringDeploymentGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_mirroring_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_mirroring_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_mirroring_deployment_group + ] = mock_rpc + + request = {} + client.delete_mirroring_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_mirroring_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_mirroring_deployment_group_rest_required_fields( + request_type=mirroring.DeleteMirroringDeploymentGroupRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_mirroring_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_mirroring_deployment_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_mirroring_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_mirroring_deployment_group_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.delete_mirroring_deployment_group._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_mirroring_deployment_group_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/mirroringDeploymentGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_mirroring_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/mirroringDeploymentGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_mirroring_deployment_group_rest_flattened_error( + transport: str = "rest", +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_mirroring_deployment_group( + mirroring.DeleteMirroringDeploymentGroupRequest(), + name="name_value", + ) + + +def test_list_mirroring_deployments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_mirroring_deployments + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_mirroring_deployments + ] = mock_rpc + + request = {} + client.list_mirroring_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_mirroring_deployments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_mirroring_deployments_rest_required_fields( + request_type=mirroring.ListMirroringDeploymentsRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_mirroring_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_mirroring_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = mirroring.ListMirroringDeploymentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.ListMirroringDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_mirroring_deployments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_mirroring_deployments_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_mirroring_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_mirroring_deployments_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.ListMirroringDeploymentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = mirroring.ListMirroringDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_mirroring_deployments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/mirroringDeployments" + % client.transport._host, + args[1], + ) + + +def test_list_mirroring_deployments_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_mirroring_deployments( + mirroring.ListMirroringDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_mirroring_deployments_rest_pager(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
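+ # Several canned response pages are queued on req.side_effect below; each page
+ # fetch made by the pager consumes the next response until next_page_token is empty.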
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + ], + next_page_token="abc", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[], + next_page_token="def", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + ], + next_page_token="ghi", + ), + mirroring.ListMirroringDeploymentsResponse( + mirroring_deployments=[ + mirroring.MirroringDeployment(), + mirroring.MirroringDeployment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + mirroring.ListMirroringDeploymentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_mirroring_deployments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, mirroring.MirroringDeployment) for i in results) + + pages = list(client.list_mirroring_deployments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_mirroring_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_mirroring_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_mirroring_deployment + ] = mock_rpc + + request = {} + client.get_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_mirroring_deployment_rest_required_fields( + request_type=mirroring.GetMirroringDeploymentRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_mirroring_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_mirroring_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringDeployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.MirroringDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_mirroring_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_mirroring_deployment_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_mirroring_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_mirroring_deployment_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
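+ # The flattened-call test below passes keyword arguments instead of a request
+ # object, then uses path_template.validate to check that the resulting URL
+ # matches the http rule configured for this method.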
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringDeployment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/mirroringDeployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = mirroring.MirroringDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_mirroring_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/mirroringDeployments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_mirroring_deployment_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_mirroring_deployment( + mirroring.GetMirroringDeploymentRequest(), + name="name_value", + ) + + +def test_create_mirroring_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_mirroring_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_mirroring_deployment + ] = mock_rpc + + request = {} + client.create_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_mirroring_deployment_rest_required_fields( + request_type=mirroring.CreateMirroringDeploymentRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["mirroring_deployment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "mirroringDeploymentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_mirroring_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "mirroringDeploymentId" in jsonified_request + assert ( + jsonified_request["mirroringDeploymentId"] + == request_init["mirroring_deployment_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["mirroringDeploymentId"] = "mirroring_deployment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_mirroring_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "mirroring_deployment_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "mirroringDeploymentId" in jsonified_request + assert jsonified_request["mirroringDeploymentId"] == "mirroring_deployment_id_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_mirroring_deployment(request) + + expected_params = [ + ( + "mirroringDeploymentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_mirroring_deployment_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_mirroring_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "mirroringDeploymentId", + "requestId", + ) + ) + & set( + ( + "parent", + "mirroringDeploymentId", + "mirroringDeployment", + ) + ) + ) + + +def test_create_mirroring_deployment_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + mirroring_deployment_id="mirroring_deployment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_mirroring_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/mirroringDeployments" + % client.transport._host, + args[1], + ) + + +def test_create_mirroring_deployment_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_mirroring_deployment( + mirroring.CreateMirroringDeploymentRequest(), + parent="parent_value", + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + mirroring_deployment_id="mirroring_deployment_id_value", + ) + + +def test_update_mirroring_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_mirroring_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_mirroring_deployment + ] = mock_rpc + + request = {} + client.update_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_mirroring_deployment_rest_required_fields( + request_type=mirroring.UpdateMirroringDeploymentRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_mirroring_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_mirroring_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_mirroring_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_mirroring_deployment_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_mirroring_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("mirroringDeployment",)) + ) + + +def test_update_mirroring_deployment_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "mirroring_deployment": { + "name": "projects/sample1/locations/sample2/mirroringDeployments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_mirroring_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{mirroring_deployment.name=projects/*/locations/*/mirroringDeployments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_mirroring_deployment_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_mirroring_deployment( + mirroring.UpdateMirroringDeploymentRequest(), + mirroring_deployment=mirroring.MirroringDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_mirroring_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_mirroring_deployment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_mirroring_deployment + ] = mock_rpc + + request = {} + client.delete_mirroring_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_mirroring_deployment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_mirroring_deployment_rest_required_fields( + request_type=mirroring.DeleteMirroringDeploymentRequest, +): + transport_class = transports.MirroringRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_mirroring_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_mirroring_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_mirroring_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_mirroring_deployment_rest_unset_required_fields(): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_mirroring_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_mirroring_deployment_rest_flattened(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/mirroringDeployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_mirroring_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/mirroringDeployments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_mirroring_deployment_rest_flattened_error(transport: str = "rest"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_mirroring_deployment( + mirroring.DeleteMirroringDeploymentRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MirroringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.MirroringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MirroringClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MirroringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MirroringClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MirroringClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MirroringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MirroringClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MirroringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MirroringClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MirroringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MirroringGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MirroringGrpcTransport, + transports.MirroringGrpcAsyncIOTransport, + transports.MirroringRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = MirroringClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_mirroring_endpoint_groups_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + call.return_value = mirroring.ListMirroringEndpointGroupsResponse() + client.list_mirroring_endpoint_groups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringEndpointGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_mirroring_endpoint_group_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = mirroring.MirroringEndpointGroup() + client.get_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_mirroring_endpoint_group_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_mirroring_endpoint_group_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_mirroring_endpoint_group_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_mirroring_endpoint_group_associations_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + call.return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse() + client.list_mirroring_endpoint_group_associations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringEndpointGroupAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_mirroring_endpoint_group_association_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = mirroring.MirroringEndpointGroupAssociation() + client.get_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_mirroring_endpoint_group_association_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_mirroring_endpoint_group_association_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_mirroring_endpoint_group_association_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_mirroring_deployment_groups_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + call.return_value = mirroring.ListMirroringDeploymentGroupsResponse() + client.list_mirroring_deployment_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringDeploymentGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_mirroring_deployment_group_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = mirroring.MirroringDeploymentGroup() + client.get_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_mirroring_deployment_group_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_mirroring_deployment_group_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_mirroring_deployment_group_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_mirroring_deployments_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + call.return_value = mirroring.ListMirroringDeploymentsResponse() + client.list_mirroring_deployments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringDeploymentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_mirroring_deployment_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment), "__call__" + ) as call: + call.return_value = mirroring.MirroringDeployment() + client.get_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_mirroring_deployment_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_mirroring_deployment_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_mirroring_deployment_empty_call_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringDeploymentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = MirroringAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_groups_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringEndpointGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_mirroring_endpoint_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringEndpointGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringEndpointGroup( + name="name_value", + mirroring_deployment_group="mirroring_deployment_group_value", + state=mirroring.MirroringEndpointGroup.State.ACTIVE, + reconciling=True, + type_=mirroring.MirroringEndpointGroup.Type.DIRECT, + description="description_value", + ) + ) + await client.get_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_mirroring_endpoint_group_associations_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringEndpointGroupAssociationsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_mirroring_endpoint_group_associations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringEndpointGroupAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_mirroring_endpoint_group_association_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringEndpointGroupAssociation( + name="name_value", + mirroring_endpoint_group="mirroring_endpoint_group_value", + network="network_value", + state=mirroring.MirroringEndpointGroupAssociation.State.ACTIVE, + reconciling=True, + ) + ) + await client.get_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_mirroring_endpoint_group_association_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_mirroring_endpoint_group_association_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_mirroring_endpoint_group_association_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_mirroring_deployment_groups_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringDeploymentGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_mirroring_deployment_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringDeploymentGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_mirroring_deployment_group_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringDeploymentGroup( + name="name_value", + network="network_value", + state=mirroring.MirroringDeploymentGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + ) + await client.get_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_mirroring_deployment_group_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_mirroring_deployment_group_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_group_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_mirroring_deployments_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.ListMirroringDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_mirroring_deployments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringDeploymentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_mirroring_deployment_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + mirroring.MirroringDeployment( + name="name_value", + forwarding_rule="forwarding_rule_value", + mirroring_deployment_group="mirroring_deployment_group_value", + state=mirroring.MirroringDeployment.State.ACTIVE, + reconciling=True, + description="description_value", + ) + ) + await client.get_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_mirroring_deployment_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_mirroring_deployment_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_mirroring_deployment_empty_call_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringDeploymentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = MirroringClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_mirroring_endpoint_groups_rest_bad_request( + request_type=mirroring.ListMirroringEndpointGroupsRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_mirroring_endpoint_groups(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.ListMirroringEndpointGroupsRequest, + dict, + ], +) +def test_list_mirroring_endpoint_groups_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = mirroring.ListMirroringEndpointGroupsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.ListMirroringEndpointGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_mirroring_endpoint_groups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMirroringEndpointGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_mirroring_endpoint_groups_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MirroringRestInterceptor, "post_list_mirroring_endpoint_groups" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_list_mirroring_endpoint_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_list_mirroring_endpoint_groups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.ListMirroringEndpointGroupsRequest.pb( + mirroring.ListMirroringEndpointGroupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = mirroring.ListMirroringEndpointGroupsResponse.to_json( + mirroring.ListMirroringEndpointGroupsResponse() + ) + req.return_value.content = return_value + + request = mirroring.ListMirroringEndpointGroupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = mirroring.ListMirroringEndpointGroupsResponse() + post_with_metadata.return_value = ( + mirroring.ListMirroringEndpointGroupsResponse(), + metadata, + ) + + client.list_mirroring_endpoint_groups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_mirroring_endpoint_group_rest_bad_request( + request_type=mirroring.GetMirroringEndpointGroupRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_mirroring_endpoint_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.GetMirroringEndpointGroupRequest, + dict, + ], +) +def test_get_mirroring_endpoint_group_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringEndpointGroup( + name="name_value", + mirroring_deployment_group="mirroring_deployment_group_value", + state=mirroring.MirroringEndpointGroup.State.ACTIVE, + reconciling=True, + type_=mirroring.MirroringEndpointGroup.Type.DIRECT, + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.MirroringEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_mirroring_endpoint_group(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, mirroring.MirroringEndpointGroup) + assert response.name == "name_value" + assert response.mirroring_deployment_group == "mirroring_deployment_group_value" + assert response.state == mirroring.MirroringEndpointGroup.State.ACTIVE + assert response.reconciling is True + assert response.type_ == mirroring.MirroringEndpointGroup.Type.DIRECT + assert response.description == "description_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_mirroring_endpoint_group_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MirroringRestInterceptor, "post_get_mirroring_endpoint_group" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_get_mirroring_endpoint_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_get_mirroring_endpoint_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.GetMirroringEndpointGroupRequest.pb( + mirroring.GetMirroringEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = mirroring.MirroringEndpointGroup.to_json( + mirroring.MirroringEndpointGroup() + ) + req.return_value.content = return_value + + request = mirroring.GetMirroringEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = mirroring.MirroringEndpointGroup() + post_with_metadata.return_value = mirroring.MirroringEndpointGroup(), metadata + + client.get_mirroring_endpoint_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_mirroring_endpoint_group_rest_bad_request( + request_type=mirroring.CreateMirroringEndpointGroupRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_mirroring_endpoint_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.CreateMirroringEndpointGroupRequest, + dict, + ], +) +def test_create_mirroring_endpoint_group_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["mirroring_endpoint_group"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "mirroring_deployment_group": "mirroring_deployment_group_value", + "connected_deployment_groups": [ + { + "name": "name_value", + "locations": [{"location": "location_value", "state": 1}], + } + ], + "state": 1, + "reconciling": True, + "type_": 1, + "associations": [ + {"name": "name_value", "network": "network_value", "state": 1} + ], + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = mirroring.CreateMirroringEndpointGroupRequest.meta.fields[ + "mirroring_endpoint_group" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "mirroring_endpoint_group" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["mirroring_endpoint_group"][field])): + del request_init["mirroring_endpoint_group"][field][i][subfield] + else: + del request_init["mirroring_endpoint_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_mirroring_endpoint_group(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_mirroring_endpoint_group_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, "post_create_mirroring_endpoint_group" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_create_mirroring_endpoint_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_create_mirroring_endpoint_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.CreateMirroringEndpointGroupRequest.pb( + mirroring.CreateMirroringEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.CreateMirroringEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_mirroring_endpoint_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_mirroring_endpoint_group_rest_bad_request( + request_type=mirroring.UpdateMirroringEndpointGroupRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "mirroring_endpoint_group": { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroups/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_mirroring_endpoint_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.UpdateMirroringEndpointGroupRequest, + dict, + ], +) +def test_update_mirroring_endpoint_group_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "mirroring_endpoint_group": { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroups/sample3" + } + } + request_init["mirroring_endpoint_group"] = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroups/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "mirroring_deployment_group": "mirroring_deployment_group_value", + "connected_deployment_groups": [ + { + "name": "name_value", + "locations": [{"location": "location_value", "state": 1}], + } + ], + "state": 1, + "reconciling": True, + "type_": 1, + "associations": [ + {"name": "name_value", "network": "network_value", "state": 1} + ], + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = mirroring.UpdateMirroringEndpointGroupRequest.meta.fields[ + "mirroring_endpoint_group" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "mirroring_endpoint_group" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["mirroring_endpoint_group"][field])): + del request_init["mirroring_endpoint_group"][field][i][subfield] + else: + del request_init["mirroring_endpoint_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_mirroring_endpoint_group(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_mirroring_endpoint_group_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, "post_update_mirroring_endpoint_group" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_update_mirroring_endpoint_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_update_mirroring_endpoint_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.UpdateMirroringEndpointGroupRequest.pb( + mirroring.UpdateMirroringEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.UpdateMirroringEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_mirroring_endpoint_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_mirroring_endpoint_group_rest_bad_request( + request_type=mirroring.DeleteMirroringEndpointGroupRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_mirroring_endpoint_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.DeleteMirroringEndpointGroupRequest, + dict, + ], +) +def test_delete_mirroring_endpoint_group_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_mirroring_endpoint_group(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_mirroring_endpoint_group_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, "post_delete_mirroring_endpoint_group" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_delete_mirroring_endpoint_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_delete_mirroring_endpoint_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.DeleteMirroringEndpointGroupRequest.pb( + mirroring.DeleteMirroringEndpointGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.DeleteMirroringEndpointGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_mirroring_endpoint_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_mirroring_endpoint_group_associations_rest_bad_request( + request_type=mirroring.ListMirroringEndpointGroupAssociationsRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_mirroring_endpoint_group_associations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.ListMirroringEndpointGroupAssociationsRequest, + dict, + ], +) +def test_list_mirroring_endpoint_group_associations_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_mirroring_endpoint_group_associations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListMirroringEndpointGroupAssociationsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_mirroring_endpoint_group_associations_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MirroringRestInterceptor, + "post_list_mirroring_endpoint_group_associations", + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_list_mirroring_endpoint_group_associations_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, + "pre_list_mirroring_endpoint_group_associations", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.ListMirroringEndpointGroupAssociationsRequest.pb( + mirroring.ListMirroringEndpointGroupAssociationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse.to_json( + mirroring.ListMirroringEndpointGroupAssociationsResponse() + ) + req.return_value.content = return_value + + request = mirroring.ListMirroringEndpointGroupAssociationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = mirroring.ListMirroringEndpointGroupAssociationsResponse() + post_with_metadata.return_value = ( + mirroring.ListMirroringEndpointGroupAssociationsResponse(), + metadata, + ) + + client.list_mirroring_endpoint_group_associations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_mirroring_endpoint_group_association_rest_bad_request( + request_type=mirroring.GetMirroringEndpointGroupAssociationRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroupAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_mirroring_endpoint_group_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.GetMirroringEndpointGroupAssociationRequest, + dict, + ], +) +def test_get_mirroring_endpoint_group_association_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroupAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringEndpointGroupAssociation( + name="name_value", + mirroring_endpoint_group="mirroring_endpoint_group_value", + network="network_value", + state=mirroring.MirroringEndpointGroupAssociation.State.ACTIVE, + reconciling=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.MirroringEndpointGroupAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_mirroring_endpoint_group_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, mirroring.MirroringEndpointGroupAssociation) + assert response.name == "name_value" + assert response.mirroring_endpoint_group == "mirroring_endpoint_group_value" + assert response.network == "network_value" + assert response.state == mirroring.MirroringEndpointGroupAssociation.State.ACTIVE + assert response.reconciling is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_mirroring_endpoint_group_association_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MirroringRestInterceptor, + "post_get_mirroring_endpoint_group_association", + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_get_mirroring_endpoint_group_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, + "pre_get_mirroring_endpoint_group_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.GetMirroringEndpointGroupAssociationRequest.pb( + mirroring.GetMirroringEndpointGroupAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = mirroring.MirroringEndpointGroupAssociation.to_json( + mirroring.MirroringEndpointGroupAssociation() + ) + req.return_value.content = return_value + + request = mirroring.GetMirroringEndpointGroupAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = mirroring.MirroringEndpointGroupAssociation() + post_with_metadata.return_value = ( + mirroring.MirroringEndpointGroupAssociation(), + metadata, + ) + + client.get_mirroring_endpoint_group_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_mirroring_endpoint_group_association_rest_bad_request( + request_type=mirroring.CreateMirroringEndpointGroupAssociationRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_mirroring_endpoint_group_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.CreateMirroringEndpointGroupAssociationRequest, + dict, + ], +) +def test_create_mirroring_endpoint_group_association_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["mirroring_endpoint_group_association"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "mirroring_endpoint_group": "mirroring_endpoint_group_value", + "network": "network_value", + "locations_details": [{"location": "location_value", "state": 1}], + "state": 1, + "reconciling": True, + "locations": [{"location": "location_value", "state": 1}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = mirroring.CreateMirroringEndpointGroupAssociationRequest.meta.fields[ + "mirroring_endpoint_group_association" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
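+        # (Proto-plus messages expose their sub-fields via `field.message.meta.fields`,
+        # raw protobuf messages via `field.message.DESCRIPTOR.fields`; both cases are
+        # handled in the body below.)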
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "mirroring_endpoint_group_association" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["mirroring_endpoint_group_association"][field]) + ): + del request_init["mirroring_endpoint_group_association"][field][i][ + subfield + ] + else: + del request_init["mirroring_endpoint_group_association"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_mirroring_endpoint_group_association(request) + + # Establish that the response is the type that we expect. 
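+    # For Operation-returning (long-running) methods the generated test stops at
+    # re-serializing the mocked Operation; no field-level assertions are made on
+    # `response` here.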
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_mirroring_endpoint_group_association_rest_interceptors( + null_interceptor, +): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, + "post_create_mirroring_endpoint_group_association", + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_create_mirroring_endpoint_group_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, + "pre_create_mirroring_endpoint_group_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.CreateMirroringEndpointGroupAssociationRequest.pb( + mirroring.CreateMirroringEndpointGroupAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.CreateMirroringEndpointGroupAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_mirroring_endpoint_group_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_mirroring_endpoint_group_association_rest_bad_request( + request_type=mirroring.UpdateMirroringEndpointGroupAssociationRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "mirroring_endpoint_group_association": { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroupAssociations/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_mirroring_endpoint_group_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.UpdateMirroringEndpointGroupAssociationRequest, + dict, + ], +) +def test_update_mirroring_endpoint_group_association_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "mirroring_endpoint_group_association": { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroupAssociations/sample3" + } + } + request_init["mirroring_endpoint_group_association"] = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroupAssociations/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "mirroring_endpoint_group": "mirroring_endpoint_group_value", + "network": "network_value", + "locations_details": [{"location": "location_value", "state": 1}], + "state": 1, + "reconciling": True, + "locations": [{"location": "location_value", "state": 1}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = mirroring.UpdateMirroringEndpointGroupAssociationRequest.meta.fields[ + "mirroring_endpoint_group_association" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "mirroring_endpoint_group_association" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["mirroring_endpoint_group_association"][field]) + ): + del request_init["mirroring_endpoint_group_association"][field][i][ + subfield + ] + else: + del request_init["mirroring_endpoint_group_association"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_mirroring_endpoint_group_association(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_mirroring_endpoint_group_association_rest_interceptors( + null_interceptor, +): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, + "post_update_mirroring_endpoint_group_association", + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_update_mirroring_endpoint_group_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, + "pre_update_mirroring_endpoint_group_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.UpdateMirroringEndpointGroupAssociationRequest.pb( + mirroring.UpdateMirroringEndpointGroupAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.UpdateMirroringEndpointGroupAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_mirroring_endpoint_group_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_mirroring_endpoint_group_association_rest_bad_request( + request_type=mirroring.DeleteMirroringEndpointGroupAssociationRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroupAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_mirroring_endpoint_group_association(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.DeleteMirroringEndpointGroupAssociationRequest, + dict, + ], +) +def test_delete_mirroring_endpoint_group_association_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringEndpointGroupAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_mirroring_endpoint_group_association(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_mirroring_endpoint_group_association_rest_interceptors( + null_interceptor, +): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, + "post_delete_mirroring_endpoint_group_association", + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_delete_mirroring_endpoint_group_association_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, + "pre_delete_mirroring_endpoint_group_association", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.DeleteMirroringEndpointGroupAssociationRequest.pb( + mirroring.DeleteMirroringEndpointGroupAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.DeleteMirroringEndpointGroupAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_mirroring_endpoint_group_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_mirroring_deployment_groups_rest_bad_request( + request_type=mirroring.ListMirroringDeploymentGroupsRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_mirroring_deployment_groups(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.ListMirroringDeploymentGroupsRequest, + dict, + ], +) +def test_list_mirroring_deployment_groups_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.ListMirroringDeploymentGroupsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.ListMirroringDeploymentGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_mirroring_deployment_groups(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListMirroringDeploymentGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_mirroring_deployment_groups_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MirroringRestInterceptor, "post_list_mirroring_deployment_groups" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_list_mirroring_deployment_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_list_mirroring_deployment_groups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.ListMirroringDeploymentGroupsRequest.pb( + mirroring.ListMirroringDeploymentGroupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = mirroring.ListMirroringDeploymentGroupsResponse.to_json( + mirroring.ListMirroringDeploymentGroupsResponse() + ) + req.return_value.content = return_value + + request = mirroring.ListMirroringDeploymentGroupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = mirroring.ListMirroringDeploymentGroupsResponse() + post_with_metadata.return_value = ( + mirroring.ListMirroringDeploymentGroupsResponse(), + metadata, + ) + + client.list_mirroring_deployment_groups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_mirroring_deployment_group_rest_bad_request( + request_type=mirroring.GetMirroringDeploymentGroupRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringDeploymentGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_mirroring_deployment_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.GetMirroringDeploymentGroupRequest, + dict, + ], +) +def test_get_mirroring_deployment_group_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringDeploymentGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringDeploymentGroup( + name="name_value", + network="network_value", + state=mirroring.MirroringDeploymentGroup.State.ACTIVE, + reconciling=True, + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.MirroringDeploymentGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_mirroring_deployment_group(request) + + # Establish that the response is the type that we expect. 
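+    # The REST transport parses the mocked JSON body back into the proto-plus message,
+    # so the designated field values should round-trip unchanged into `response`.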
+ assert isinstance(response, mirroring.MirroringDeploymentGroup) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.state == mirroring.MirroringDeploymentGroup.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_mirroring_deployment_group_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MirroringRestInterceptor, "post_get_mirroring_deployment_group" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_get_mirroring_deployment_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_get_mirroring_deployment_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.GetMirroringDeploymentGroupRequest.pb( + mirroring.GetMirroringDeploymentGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = mirroring.MirroringDeploymentGroup.to_json( + mirroring.MirroringDeploymentGroup() + ) + req.return_value.content = return_value + + request = mirroring.GetMirroringDeploymentGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = mirroring.MirroringDeploymentGroup() + post_with_metadata.return_value = mirroring.MirroringDeploymentGroup(), metadata + + client.get_mirroring_deployment_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_mirroring_deployment_group_rest_bad_request( + request_type=mirroring.CreateMirroringDeploymentGroupRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_mirroring_deployment_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.CreateMirroringDeploymentGroupRequest, + dict, + ], +) +def test_create_mirroring_deployment_group_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["mirroring_deployment_group"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "network": "network_value", + "connected_endpoint_groups": [{"name": "name_value"}], + "nested_deployments": [{"name": "name_value", "state": 1}], + "state": 1, + "reconciling": True, + "description": "description_value", + "locations": [{"location": "location_value", "state": 1}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = mirroring.CreateMirroringDeploymentGroupRequest.meta.fields[ + "mirroring_deployment_group" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "mirroring_deployment_group" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["mirroring_deployment_group"][field]) + ): + del request_init["mirroring_deployment_group"][field][i][subfield] + else: + del request_init["mirroring_deployment_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_mirroring_deployment_group(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_mirroring_deployment_group_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, "post_create_mirroring_deployment_group" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_create_mirroring_deployment_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_create_mirroring_deployment_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.CreateMirroringDeploymentGroupRequest.pb( + mirroring.CreateMirroringDeploymentGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.CreateMirroringDeploymentGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_mirroring_deployment_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_mirroring_deployment_group_rest_bad_request( + request_type=mirroring.UpdateMirroringDeploymentGroupRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "mirroring_deployment_group": { + "name": "projects/sample1/locations/sample2/mirroringDeploymentGroups/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_mirroring_deployment_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.UpdateMirroringDeploymentGroupRequest, + dict, + ], +) +def test_update_mirroring_deployment_group_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "mirroring_deployment_group": { + "name": "projects/sample1/locations/sample2/mirroringDeploymentGroups/sample3" + } + } + request_init["mirroring_deployment_group"] = { + "name": "projects/sample1/locations/sample2/mirroringDeploymentGroups/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "network": "network_value", + "connected_endpoint_groups": [{"name": "name_value"}], + "nested_deployments": [{"name": "name_value", "state": 1}], + "state": 1, + "reconciling": True, + "description": "description_value", + "locations": [{"location": "location_value", "state": 1}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = mirroring.UpdateMirroringDeploymentGroupRequest.meta.fields[ + "mirroring_deployment_group" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "mirroring_deployment_group" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["mirroring_deployment_group"][field]) + ): + del request_init["mirroring_deployment_group"][field][i][subfield] + else: + del request_init["mirroring_deployment_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_mirroring_deployment_group(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_mirroring_deployment_group_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, "post_update_mirroring_deployment_group" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_update_mirroring_deployment_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_update_mirroring_deployment_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.UpdateMirroringDeploymentGroupRequest.pb( + mirroring.UpdateMirroringDeploymentGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.UpdateMirroringDeploymentGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_mirroring_deployment_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_mirroring_deployment_group_rest_bad_request( + request_type=mirroring.DeleteMirroringDeploymentGroupRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringDeploymentGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_mirroring_deployment_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.DeleteMirroringDeploymentGroupRequest, + dict, + ], +) +def test_delete_mirroring_deployment_group_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringDeploymentGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_mirroring_deployment_group(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_mirroring_deployment_group_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, "post_delete_mirroring_deployment_group" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_delete_mirroring_deployment_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_delete_mirroring_deployment_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.DeleteMirroringDeploymentGroupRequest.pb( + mirroring.DeleteMirroringDeploymentGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.DeleteMirroringDeploymentGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + 
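+        # The stubbed hooks mirror the RestInterceptor contract: `pre_*` returns
+        # (request, metadata) and `post_*` returns the response. A user-supplied
+        # interceptor would follow the same shape, e.g. (hypothetical sketch):
+        #
+        #   class LoggingInterceptor(transports.MirroringRestInterceptor):
+        #       def pre_delete_mirroring_deployment_group(self, request, metadata):
+        #           return request, metadata
+        #       def post_delete_mirroring_deployment_group(self, response):
+        #           return response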
post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_mirroring_deployment_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_mirroring_deployments_rest_bad_request( + request_type=mirroring.ListMirroringDeploymentsRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_mirroring_deployments(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.ListMirroringDeploymentsRequest, + dict, + ], +) +def test_list_mirroring_deployments_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.ListMirroringDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.ListMirroringDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_mirroring_deployments(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListMirroringDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_mirroring_deployments_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MirroringRestInterceptor, "post_list_mirroring_deployments" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_list_mirroring_deployments_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_list_mirroring_deployments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.ListMirroringDeploymentsRequest.pb( + mirroring.ListMirroringDeploymentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = mirroring.ListMirroringDeploymentsResponse.to_json( + mirroring.ListMirroringDeploymentsResponse() + ) + req.return_value.content = return_value + + request = mirroring.ListMirroringDeploymentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = mirroring.ListMirroringDeploymentsResponse() + post_with_metadata.return_value = ( + mirroring.ListMirroringDeploymentsResponse(), + metadata, + ) + + client.list_mirroring_deployments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_mirroring_deployment_rest_bad_request( + request_type=mirroring.GetMirroringDeploymentRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringDeployments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_mirroring_deployment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.GetMirroringDeploymentRequest, + dict, + ], +) +def test_get_mirroring_deployment_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringDeployments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = mirroring.MirroringDeployment( + name="name_value", + forwarding_rule="forwarding_rule_value", + mirroring_deployment_group="mirroring_deployment_group_value", + state=mirroring.MirroringDeployment.State.ACTIVE, + reconciling=True, + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = mirroring.MirroringDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_mirroring_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, mirroring.MirroringDeployment) + assert response.name == "name_value" + assert response.forwarding_rule == "forwarding_rule_value" + assert response.mirroring_deployment_group == "mirroring_deployment_group_value" + assert response.state == mirroring.MirroringDeployment.State.ACTIVE + assert response.reconciling is True + assert response.description == "description_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_mirroring_deployment_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MirroringRestInterceptor, "post_get_mirroring_deployment" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_get_mirroring_deployment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_get_mirroring_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.GetMirroringDeploymentRequest.pb( + mirroring.GetMirroringDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = mirroring.MirroringDeployment.to_json( + mirroring.MirroringDeployment() + ) + req.return_value.content = return_value + + request = mirroring.GetMirroringDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = mirroring.MirroringDeployment() + post_with_metadata.return_value = mirroring.MirroringDeployment(), metadata + + client.get_mirroring_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_mirroring_deployment_rest_bad_request( + request_type=mirroring.CreateMirroringDeploymentRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_mirroring_deployment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.CreateMirroringDeploymentRequest, + dict, + ], +) +def test_create_mirroring_deployment_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["mirroring_deployment"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "forwarding_rule": "forwarding_rule_value", + "mirroring_deployment_group": "mirroring_deployment_group_value", + "state": 1, + "reconciling": True, + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = mirroring.CreateMirroringDeploymentRequest.meta.fields[ + "mirroring_deployment" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
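+ # Proto-plus message classes expose their fields via `.meta.fields`, while
+ # raw protobuf messages expose them via `DESCRIPTOR.fields`; the branch
+ # below handles both cases.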
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "mirroring_deployment" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["mirroring_deployment"][field])): + del request_init["mirroring_deployment"][field][i][subfield] + else: + del request_init["mirroring_deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_mirroring_deployment(request) + + # Establish that the response is the type that we expect. 
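+ # Note: for methods that return a long-running operation, this test only
+ # re-serializes the mocked Operation; it does not resolve the returned
+ # operation future or assert on it.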
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_mirroring_deployment_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, "post_create_mirroring_deployment" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_create_mirroring_deployment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_create_mirroring_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.CreateMirroringDeploymentRequest.pb( + mirroring.CreateMirroringDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.CreateMirroringDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_mirroring_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_mirroring_deployment_rest_bad_request( + request_type=mirroring.UpdateMirroringDeploymentRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "mirroring_deployment": { + "name": "projects/sample1/locations/sample2/mirroringDeployments/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_mirroring_deployment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.UpdateMirroringDeploymentRequest, + dict, + ], +) +def test_update_mirroring_deployment_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "mirroring_deployment": { + "name": "projects/sample1/locations/sample2/mirroringDeployments/sample3" + } + } + request_init["mirroring_deployment"] = { + "name": "projects/sample1/locations/sample2/mirroringDeployments/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "forwarding_rule": "forwarding_rule_value", + "mirroring_deployment_group": "mirroring_deployment_group_value", + "state": 1, + "reconciling": True, + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = mirroring.UpdateMirroringDeploymentRequest.meta.fields[ + "mirroring_deployment" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "mirroring_deployment" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["mirroring_deployment"][field])): + del request_init["mirroring_deployment"][field][i][subfield] + else: + del request_init["mirroring_deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_mirroring_deployment(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_mirroring_deployment_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, "post_update_mirroring_deployment" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_update_mirroring_deployment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_update_mirroring_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.UpdateMirroringDeploymentRequest.pb( + mirroring.UpdateMirroringDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.UpdateMirroringDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_mirroring_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_mirroring_deployment_rest_bad_request( + request_type=mirroring.DeleteMirroringDeploymentRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringDeployments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_mirroring_deployment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + mirroring.DeleteMirroringDeploymentRequest, + dict, + ], +) +def test_delete_mirroring_deployment_rest_call_success(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/mirroringDeployments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_mirroring_deployment(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_mirroring_deployment_rest_interceptors(null_interceptor): + transport = transports.MirroringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MirroringRestInterceptor(), + ) + client = MirroringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MirroringRestInterceptor, "post_delete_mirroring_deployment" + ) as post, mock.patch.object( + transports.MirroringRestInterceptor, + "post_delete_mirroring_deployment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MirroringRestInterceptor, "pre_delete_mirroring_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = mirroring.DeleteMirroringDeploymentRequest.pb( + mirroring.DeleteMirroringDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = mirroring.DeleteMirroringDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_mirroring_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_mirroring_endpoint_groups_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_groups), "__call__" + ) as call: + client.list_mirroring_endpoint_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringEndpointGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_mirroring_endpoint_group_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group), "__call__" + ) as call: + client.get_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_mirroring_endpoint_group_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group), "__call__" + ) as call: + client.create_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_mirroring_endpoint_group_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group), "__call__" + ) as call: + client.update_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_mirroring_endpoint_group_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group), "__call__" + ) as call: + client.delete_mirroring_endpoint_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringEndpointGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_mirroring_endpoint_group_associations_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_endpoint_group_associations), "__call__" + ) as call: + client.list_mirroring_endpoint_group_associations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringEndpointGroupAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_mirroring_endpoint_group_association_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_mirroring_endpoint_group_association), "__call__" + ) as call: + client.get_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_mirroring_endpoint_group_association_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_endpoint_group_association), "__call__" + ) as call: + client.create_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_mirroring_endpoint_group_association_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_endpoint_group_association), "__call__" + ) as call: + client.update_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_mirroring_endpoint_group_association_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_endpoint_group_association), "__call__" + ) as call: + client.delete_mirroring_endpoint_group_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringEndpointGroupAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_mirroring_deployment_groups_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_mirroring_deployment_groups), "__call__" + ) as call: + client.list_mirroring_deployment_groups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringDeploymentGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_mirroring_deployment_group_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment_group), "__call__" + ) as call: + client.get_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_mirroring_deployment_group_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment_group), "__call__" + ) as call: + client.create_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_mirroring_deployment_group_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment_group), "__call__" + ) as call: + client.update_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_mirroring_deployment_group_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment_group), "__call__" + ) as call: + client.delete_mirroring_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_mirroring_deployments_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_mirroring_deployments), "__call__" + ) as call: + client.list_mirroring_deployments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.ListMirroringDeploymentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_mirroring_deployment_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_mirroring_deployment), "__call__" + ) as call: + client.get_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.GetMirroringDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_mirroring_deployment_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_mirroring_deployment), "__call__" + ) as call: + client.create_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.CreateMirroringDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_mirroring_deployment_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_mirroring_deployment), "__call__" + ) as call: + client.update_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.UpdateMirroringDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_mirroring_deployment_empty_call_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_mirroring_deployment), "__call__" + ) as call: + client.delete_mirroring_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = mirroring.DeleteMirroringDeploymentRequest() + + assert args[0] == request_msg + + +def test_mirroring_rest_lro_client(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MirroringGrpcTransport, + ) + + +def test_mirroring_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MirroringTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_mirroring_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.network_security_v1alpha1.services.mirroring.transports.MirroringTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MirroringTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_mirroring_endpoint_groups", + "get_mirroring_endpoint_group", + "create_mirroring_endpoint_group", + "update_mirroring_endpoint_group", + "delete_mirroring_endpoint_group", + "list_mirroring_endpoint_group_associations", + "get_mirroring_endpoint_group_association", + "create_mirroring_endpoint_group_association", + "update_mirroring_endpoint_group_association", + "delete_mirroring_endpoint_group_association", + "list_mirroring_deployment_groups", + "get_mirroring_deployment_group", + "create_mirroring_deployment_group", + "update_mirroring_deployment_group", + "delete_mirroring_deployment_group", + "list_mirroring_deployments", + "get_mirroring_deployment", + "create_mirroring_deployment", + "update_mirroring_deployment", + "delete_mirroring_deployment", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_mirroring_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.network_security_v1alpha1.services.mirroring.transports.MirroringTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MirroringTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + 
+ +def test_mirroring_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.network_security_v1alpha1.services.mirroring.transports.MirroringTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MirroringTransport() + adc.assert_called_once() + + +def test_mirroring_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MirroringClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MirroringGrpcTransport, + transports.MirroringGrpcAsyncIOTransport, + ], +) +def test_mirroring_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MirroringGrpcTransport, + transports.MirroringGrpcAsyncIOTransport, + transports.MirroringRestTransport, + ], +) +def test_mirroring_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MirroringGrpcTransport, grpc_helpers), + (transports.MirroringGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_mirroring_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.MirroringGrpcTransport, transports.MirroringGrpcAsyncIOTransport], +) +def test_mirroring_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_mirroring_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MirroringRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_mirroring_host_no_port(transport_name): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_mirroring_host_with_port(transport_name): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + 
"networksecurity.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_mirroring_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MirroringClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MirroringClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_mirroring_endpoint_groups._session + session2 = client2.transport.list_mirroring_endpoint_groups._session + assert session1 != session2 + session1 = client1.transport.get_mirroring_endpoint_group._session + session2 = client2.transport.get_mirroring_endpoint_group._session + assert session1 != session2 + session1 = client1.transport.create_mirroring_endpoint_group._session + session2 = client2.transport.create_mirroring_endpoint_group._session + assert session1 != session2 + session1 = client1.transport.update_mirroring_endpoint_group._session + session2 = client2.transport.update_mirroring_endpoint_group._session + assert session1 != session2 + session1 = client1.transport.delete_mirroring_endpoint_group._session + session2 = client2.transport.delete_mirroring_endpoint_group._session + assert session1 != session2 + session1 = client1.transport.list_mirroring_endpoint_group_associations._session + session2 = client2.transport.list_mirroring_endpoint_group_associations._session + assert session1 != session2 + session1 = client1.transport.get_mirroring_endpoint_group_association._session + session2 = client2.transport.get_mirroring_endpoint_group_association._session + assert session1 != session2 + session1 = client1.transport.create_mirroring_endpoint_group_association._session + session2 = client2.transport.create_mirroring_endpoint_group_association._session + assert session1 != session2 + session1 = client1.transport.update_mirroring_endpoint_group_association._session + session2 = client2.transport.update_mirroring_endpoint_group_association._session + assert session1 != session2 + session1 = client1.transport.delete_mirroring_endpoint_group_association._session + session2 = client2.transport.delete_mirroring_endpoint_group_association._session + assert session1 != session2 + session1 = client1.transport.list_mirroring_deployment_groups._session + session2 = client2.transport.list_mirroring_deployment_groups._session + assert session1 != session2 + session1 = client1.transport.get_mirroring_deployment_group._session + session2 = client2.transport.get_mirroring_deployment_group._session + assert session1 != session2 + session1 = client1.transport.create_mirroring_deployment_group._session + session2 = client2.transport.create_mirroring_deployment_group._session + assert session1 != session2 + session1 = client1.transport.update_mirroring_deployment_group._session + session2 = client2.transport.update_mirroring_deployment_group._session + assert session1 != session2 + session1 = client1.transport.delete_mirroring_deployment_group._session + session2 = client2.transport.delete_mirroring_deployment_group._session + assert session1 != session2 + session1 = client1.transport.list_mirroring_deployments._session + session2 = client2.transport.list_mirroring_deployments._session + assert session1 != session2 + session1 = client1.transport.get_mirroring_deployment._session + session2 = 
client2.transport.get_mirroring_deployment._session + assert session1 != session2 + session1 = client1.transport.create_mirroring_deployment._session + session2 = client2.transport.create_mirroring_deployment._session + assert session1 != session2 + session1 = client1.transport.update_mirroring_deployment._session + session2 = client2.transport.update_mirroring_deployment._session + assert session1 != session2 + session1 = client1.transport.delete_mirroring_deployment._session + session2 = client2.transport.delete_mirroring_deployment._session + assert session1 != session2 + + +def test_mirroring_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MirroringGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_mirroring_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MirroringGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.MirroringGrpcTransport, transports.MirroringGrpcAsyncIOTransport], +) +def test_mirroring_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.MirroringGrpcTransport, transports.MirroringGrpcAsyncIOTransport],
+)
+def test_mirroring_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_mirroring_grpc_lro_client():
+    client = MirroringClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_mirroring_grpc_lro_async_client():
+    client = MirroringAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_forwarding_rule_path():
+    project = "squid"
+    forwarding_rule = "clam"
+    expected = "projects/{project}/global/forwardingRules/{forwarding_rule}".format(
+        project=project,
+        forwarding_rule=forwarding_rule,
+    )
+    actual = MirroringClient.forwarding_rule_path(project, forwarding_rule)
+    assert expected == actual
+
+
+def test_parse_forwarding_rule_path():
+    expected = {
+        "project": "whelk",
+        "forwarding_rule": "octopus",
+    }
+    path = MirroringClient.forwarding_rule_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = MirroringClient.parse_forwarding_rule_path(path)
+    assert expected == actual
+
+
+def test_mirroring_deployment_path():
+    project = "oyster"
+    location = "nudibranch"
+    mirroring_deployment = "cuttlefish"
+    expected = "projects/{project}/locations/{location}/mirroringDeployments/{mirroring_deployment}".format(
+        project=project,
+        location=location,
+        mirroring_deployment=mirroring_deployment,
+    )
+    actual = MirroringClient.mirroring_deployment_path(
+        project, location, mirroring_deployment
+    )
+    assert expected == actual
+
+
+def test_parse_mirroring_deployment_path():
+    expected = {
+        "project": "mussel",
+        "location": "winkle",
+        "mirroring_deployment": "nautilus",
+    }
+    path = MirroringClient.mirroring_deployment_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = MirroringClient.parse_mirroring_deployment_path(path) + assert expected == actual + + +def test_mirroring_deployment_group_path(): + project = "scallop" + location = "abalone" + mirroring_deployment_group = "squid" + expected = "projects/{project}/locations/{location}/mirroringDeploymentGroups/{mirroring_deployment_group}".format( + project=project, + location=location, + mirroring_deployment_group=mirroring_deployment_group, + ) + actual = MirroringClient.mirroring_deployment_group_path( + project, location, mirroring_deployment_group + ) + assert expected == actual + + +def test_parse_mirroring_deployment_group_path(): + expected = { + "project": "clam", + "location": "whelk", + "mirroring_deployment_group": "octopus", + } + path = MirroringClient.mirroring_deployment_group_path(**expected) + + # Check that the path construction is reversible. + actual = MirroringClient.parse_mirroring_deployment_group_path(path) + assert expected == actual + + +def test_mirroring_endpoint_group_path(): + project = "oyster" + location = "nudibranch" + mirroring_endpoint_group = "cuttlefish" + expected = "projects/{project}/locations/{location}/mirroringEndpointGroups/{mirroring_endpoint_group}".format( + project=project, + location=location, + mirroring_endpoint_group=mirroring_endpoint_group, + ) + actual = MirroringClient.mirroring_endpoint_group_path( + project, location, mirroring_endpoint_group + ) + assert expected == actual + + +def test_parse_mirroring_endpoint_group_path(): + expected = { + "project": "mussel", + "location": "winkle", + "mirroring_endpoint_group": "nautilus", + } + path = MirroringClient.mirroring_endpoint_group_path(**expected) + + # Check that the path construction is reversible. + actual = MirroringClient.parse_mirroring_endpoint_group_path(path) + assert expected == actual + + +def test_mirroring_endpoint_group_association_path(): + project = "scallop" + location = "abalone" + mirroring_endpoint_group_association = "squid" + expected = "projects/{project}/locations/{location}/mirroringEndpointGroupAssociations/{mirroring_endpoint_group_association}".format( + project=project, + location=location, + mirroring_endpoint_group_association=mirroring_endpoint_group_association, + ) + actual = MirroringClient.mirroring_endpoint_group_association_path( + project, location, mirroring_endpoint_group_association + ) + assert expected == actual + + +def test_parse_mirroring_endpoint_group_association_path(): + expected = { + "project": "clam", + "location": "whelk", + "mirroring_endpoint_group_association": "octopus", + } + path = MirroringClient.mirroring_endpoint_group_association_path(**expected) + + # Check that the path construction is reversible. + actual = MirroringClient.parse_mirroring_endpoint_group_association_path(path) + assert expected == actual + + +def test_network_path(): + project = "oyster" + network = "nudibranch" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = MirroringClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "cuttlefish", + "network": "mussel", + } + path = MirroringClient.network_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MirroringClient.parse_network_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MirroringClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = MirroringClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MirroringClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MirroringClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = MirroringClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MirroringClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MirroringClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = MirroringClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MirroringClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = MirroringClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = MirroringClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MirroringClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MirroringClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = MirroringClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MirroringClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MirroringTransport, "_prep_wrapped_messages" + ) as prep: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MirroringTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MirroringClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = MirroringClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = MirroringAsyncClient(credentials=async_anonymous_credentials())
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = MirroringClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = MirroringAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = MirroringClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = MirroringAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = MirroringClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+    client = MirroringAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+def test_set_iam_policy_from_dict():
+    client = MirroringClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = MirroringAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = MirroringClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MirroringClient, transports.MirroringGrpcTransport), + (MirroringAsyncClient, transports.MirroringGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_network_security.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_network_security.py index f2a23ddb09a4..96cdd0cd1fb7 100644 --- a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_network_security.py +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_network_security.py @@ -75,11 +75,42 @@ pagers, transports, ) +from google.cloud.network_security_v1alpha1.types import ( + authorization_policy as gcn_authorization_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + authz_policy as gcn_authz_policy, +) +from google.cloud.network_security_v1alpha1.types import backend_authentication_config +from google.cloud.network_security_v1alpha1.types import ( + backend_authentication_config as gcn_backend_authentication_config, +) from google.cloud.network_security_v1alpha1.types import ( client_tls_policy as gcn_client_tls_policy, ) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy as gcn_gateway_security_policy, +) +from google.cloud.network_security_v1alpha1.types import gateway_security_policy_rule +from google.cloud.network_security_v1alpha1.types import ( + gateway_security_policy_rule as gcn_gateway_security_policy_rule, +) +from google.cloud.network_security_v1alpha1.types import ( + server_tls_policy as gcn_server_tls_policy, +) +from google.cloud.network_security_v1alpha1.types import ( + tls_inspection_policy as gcn_tls_inspection_policy, +) +from google.cloud.network_security_v1alpha1.types import url_list as gcn_url_list +from google.cloud.network_security_v1alpha1.types import authorization_policy +from google.cloud.network_security_v1alpha1.types import authz_policy from google.cloud.network_security_v1alpha1.types import client_tls_policy -from google.cloud.network_security_v1alpha1.types import common, tls +from google.cloud.network_security_v1alpha1.types import common +from google.cloud.network_security_v1alpha1.types import server_tls_policy +from google.cloud.network_security_v1alpha1.types import tls +from google.cloud.network_security_v1alpha1.types import 
tls_inspection_policy +from google.cloud.network_security_v1alpha1.types import url_list CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -1158,11 +1189,11 @@ def test_network_security_client_create_channel_credentials_file( @pytest.mark.parametrize( "request_type", [ - client_tls_policy.ListClientTlsPoliciesRequest, + authorization_policy.ListAuthorizationPoliciesRequest, dict, ], ) -def test_list_client_tls_policies(request_type, transport: str = "grpc"): +def test_list_authorization_policies(request_type, transport: str = "grpc"): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1174,26 +1205,26 @@ def test_list_client_tls_policies(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.list_authorization_policies), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = client_tls_policy.ListClientTlsPoliciesResponse( + call.return_value = authorization_policy.ListAuthorizationPoliciesResponse( next_page_token="next_page_token_value", ) - response = client.list_client_tls_policies(request) + response = client.list_authorization_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = client_tls_policy.ListClientTlsPoliciesRequest() + request = authorization_policy.ListAuthorizationPoliciesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListClientTlsPoliciesPager) + assert isinstance(response, pagers.ListAuthorizationPoliciesPager) assert response.next_page_token == "next_page_token_value" -def test_list_client_tls_policies_non_empty_request_with_auto_populated_field(): +def test_list_authorization_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = NetworkSecurityClient( @@ -1204,28 +1235,28 @@ def test_list_client_tls_policies_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = client_tls_policy.ListClientTlsPoliciesRequest( + request = authorization_policy.ListAuthorizationPoliciesRequest( parent="parent_value", page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.list_authorization_policies), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_client_tls_policies(request=request) + client.list_authorization_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == client_tls_policy.ListClientTlsPoliciesRequest( + assert args[0] == authorization_policy.ListAuthorizationPoliciesRequest( parent="parent_value", page_token="page_token_value", ) -def test_list_client_tls_policies_use_cached_wrapped_rpc(): +def test_list_authorization_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -1240,7 +1271,7 @@ def test_list_client_tls_policies_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_client_tls_policies + client._transport.list_authorization_policies in client._transport._wrapped_methods ) @@ -1250,15 +1281,15 @@ def test_list_client_tls_policies_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_client_tls_policies + client._transport.list_authorization_policies ] = mock_rpc request = {} - client.list_client_tls_policies(request) + client.list_authorization_policies(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_client_tls_policies(request) + client.list_authorization_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1266,7 +1297,7 @@ def test_list_client_tls_policies_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_client_tls_policies_async_use_cached_wrapped_rpc( +async def test_list_authorization_policies_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1283,7 +1314,7 @@ async def test_list_client_tls_policies_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_client_tls_policies + client._client._transport.list_authorization_policies in client._client._transport._wrapped_methods ) @@ -1291,16 +1322,16 @@ async def test_list_client_tls_policies_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_client_tls_policies + client._client._transport.list_authorization_policies ] = mock_rpc request = {} - await client.list_client_tls_policies(request) + await client.list_authorization_policies(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_client_tls_policies(request) + await client.list_authorization_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1308,9 +1339,9 @@ async def test_list_client_tls_policies_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_client_tls_policies_async( +async def test_list_authorization_policies_async( transport: str = "grpc_asyncio", - request_type=client_tls_policy.ListClientTlsPoliciesRequest, + request_type=authorization_policy.ListAuthorizationPoliciesRequest, ): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), @@ -1323,49 +1354,49 @@ async def test_list_client_tls_policies_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.list_authorization_policies), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - client_tls_policy.ListClientTlsPoliciesResponse( + authorization_policy.ListAuthorizationPoliciesResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_client_tls_policies(request) + response = await client.list_authorization_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = client_tls_policy.ListClientTlsPoliciesRequest() + request = authorization_policy.ListAuthorizationPoliciesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListClientTlsPoliciesAsyncPager) + assert isinstance(response, pagers.ListAuthorizationPoliciesAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_list_client_tls_policies_async_from_dict(): - await test_list_client_tls_policies_async(request_type=dict) +async def test_list_authorization_policies_async_from_dict(): + await test_list_authorization_policies_async(request_type=dict) -def test_list_client_tls_policies_field_headers(): +def test_list_authorization_policies_field_headers(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = client_tls_policy.ListClientTlsPoliciesRequest() + request = authorization_policy.ListAuthorizationPoliciesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.list_authorization_policies), "__call__" ) as call: - call.return_value = client_tls_policy.ListClientTlsPoliciesResponse() - client.list_client_tls_policies(request) + call.return_value = authorization_policy.ListAuthorizationPoliciesResponse() + client.list_authorization_policies(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -1381,25 +1412,25 @@ def test_list_client_tls_policies_field_headers(): @pytest.mark.asyncio -async def test_list_client_tls_policies_field_headers_async(): +async def test_list_authorization_policies_field_headers_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = client_tls_policy.ListClientTlsPoliciesRequest() + request = authorization_policy.ListAuthorizationPoliciesRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.list_authorization_policies), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - client_tls_policy.ListClientTlsPoliciesResponse() + authorization_policy.ListAuthorizationPoliciesResponse() ) - await client.list_client_tls_policies(request) + await client.list_authorization_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1414,20 +1445,20 @@ async def test_list_client_tls_policies_field_headers_async(): ) in kw["metadata"] -def test_list_client_tls_policies_flattened(): +def test_list_authorization_policies_flattened(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.list_authorization_policies), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = client_tls_policy.ListClientTlsPoliciesResponse() + call.return_value = authorization_policy.ListAuthorizationPoliciesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_client_tls_policies( + client.list_authorization_policies( parent="parent_value", ) @@ -1440,7 +1471,7 @@ def test_list_client_tls_policies_flattened(): assert arg == mock_val -def test_list_client_tls_policies_flattened_error(): +def test_list_authorization_policies_flattened_error(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -1448,31 +1479,31 @@ def test_list_client_tls_policies_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_client_tls_policies( - client_tls_policy.ListClientTlsPoliciesRequest(), + client.list_authorization_policies( + authorization_policy.ListAuthorizationPoliciesRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_client_tls_policies_flattened_async(): +async def test_list_authorization_policies_flattened_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.list_authorization_policies), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = client_tls_policy.ListClientTlsPoliciesResponse() + call.return_value = authorization_policy.ListAuthorizationPoliciesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - client_tls_policy.ListClientTlsPoliciesResponse() + authorization_policy.ListAuthorizationPoliciesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_client_tls_policies( + response = await client.list_authorization_policies( parent="parent_value", ) @@ -1486,7 +1517,7 @@ async def test_list_client_tls_policies_flattened_async(): @pytest.mark.asyncio -async def test_list_client_tls_policies_flattened_error_async(): +async def test_list_authorization_policies_flattened_error_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) @@ -1494,13 +1525,13 @@ async def test_list_client_tls_policies_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_client_tls_policies( - client_tls_policy.ListClientTlsPoliciesRequest(), + await client.list_authorization_policies( + authorization_policy.ListAuthorizationPoliciesRequest(), parent="parent_value", ) -def test_list_client_tls_policies_pager(transport_name: str = "grpc"): +def test_list_authorization_policies_pager(transport_name: str = "grpc"): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -1508,32 +1539,32 @@ def test_list_client_tls_policies_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.list_authorization_policies), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), ], next_page_token="abc", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[], + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[], next_page_token="def", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), ], next_page_token="ghi", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), ], ), RuntimeError, @@ -1545,7 +1576,7 @@ def test_list_client_tls_policies_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_client_tls_policies( + pager = client.list_authorization_policies( request={}, retry=retry, timeout=timeout ) @@ -1555,10 +1586,12 @@ def test_list_client_tls_policies_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, client_tls_policy.ClientTlsPolicy) for i in results) + assert all( + isinstance(i, authorization_policy.AuthorizationPolicy) for i in results + ) -def test_list_client_tls_policies_pages(transport_name: str = "grpc"): +def test_list_authorization_policies_pages(transport_name: str = "grpc"): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -1566,82 +1599,82 @@ def test_list_client_tls_policies_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.list_authorization_policies), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), ], next_page_token="abc", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[], + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[], next_page_token="def", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), ], next_page_token="ghi", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), ], ), RuntimeError, ) - pages = list(client.list_client_tls_policies(request={}).pages) + pages = list(client.list_authorization_policies(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_client_tls_policies_async_pager(): +async def test_list_authorization_policies_async_pager(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), + type(client.transport.list_authorization_policies), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), ], next_page_token="abc", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[], + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[], next_page_token="def", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), ], next_page_token="ghi", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), ], ), RuntimeError, ) - async_pager = await client.list_client_tls_policies( + async_pager = await client.list_authorization_policies( request={}, ) assert async_pager.next_page_token == "abc" @@ -1650,45 +1683,47 @@ async def test_list_client_tls_policies_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, client_tls_policy.ClientTlsPolicy) for i in responses) + assert all( + isinstance(i, authorization_policy.AuthorizationPolicy) for i in responses + ) @pytest.mark.asyncio -async def test_list_client_tls_policies_async_pages(): +async def test_list_authorization_policies_async_pages(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), + type(client.transport.list_authorization_policies), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), ], next_page_token="abc", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[], + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[], next_page_token="def", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), ], next_page_token="ghi", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), ], ), RuntimeError, @@ -1697,7 +1732,7 @@ async def test_list_client_tls_policies_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_client_tls_policies(request={}) + await client.list_authorization_policies(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -1707,11 +1742,11 @@ async def test_list_client_tls_policies_async_pages(): @pytest.mark.parametrize( "request_type", [ - client_tls_policy.GetClientTlsPolicyRequest, + authorization_policy.GetAuthorizationPolicyRequest, dict, ], ) -def test_get_client_tls_policy(request_type, transport: str = "grpc"): +def test_get_authorization_policy(request_type, transport: str = "grpc"): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1723,30 +1758,30 @@ def test_get_client_tls_policy(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_client_tls_policy), "__call__" + type(client.transport.get_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = client_tls_policy.ClientTlsPolicy( + call.return_value = authorization_policy.AuthorizationPolicy( name="name_value", description="description_value", - sni="sni_value", + action=authorization_policy.AuthorizationPolicy.Action.ALLOW, ) - response = client.get_client_tls_policy(request) + response = client.get_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = client_tls_policy.GetClientTlsPolicyRequest() + request = authorization_policy.GetAuthorizationPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, client_tls_policy.ClientTlsPolicy) + assert isinstance(response, authorization_policy.AuthorizationPolicy) assert response.name == "name_value" assert response.description == "description_value" - assert response.sni == "sni_value" + assert response.action == authorization_policy.AuthorizationPolicy.Action.ALLOW -def test_get_client_tls_policy_non_empty_request_with_auto_populated_field(): +def test_get_authorization_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = NetworkSecurityClient( @@ -1757,26 +1792,26 @@ def test_get_client_tls_policy_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = client_tls_policy.GetClientTlsPolicyRequest( + request = authorization_policy.GetAuthorizationPolicyRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_client_tls_policy), "__call__" + type(client.transport.get_authorization_policy), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_client_tls_policy(request=request) + client.get_authorization_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == client_tls_policy.GetClientTlsPolicyRequest( + assert args[0] == authorization_policy.GetAuthorizationPolicyRequest( name="name_value", ) -def test_get_client_tls_policy_use_cached_wrapped_rpc(): +def test_get_authorization_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -1791,7 +1826,7 @@ def test_get_client_tls_policy_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_client_tls_policy + client._transport.get_authorization_policy in client._transport._wrapped_methods ) @@ -1801,15 +1836,15 @@ def test_get_client_tls_policy_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_client_tls_policy + client._transport.get_authorization_policy ] = mock_rpc request = {} - client.get_client_tls_policy(request) + client.get_authorization_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_client_tls_policy(request) + client.get_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1817,7 +1852,7 @@ def test_get_client_tls_policy_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_client_tls_policy_async_use_cached_wrapped_rpc( +async def test_get_authorization_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1834,7 +1869,7 @@ async def test_get_client_tls_policy_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_client_tls_policy + client._client._transport.get_authorization_policy in client._client._transport._wrapped_methods ) @@ -1842,16 +1877,16 @@ async def test_get_client_tls_policy_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_client_tls_policy + client._client._transport.get_authorization_policy ] = mock_rpc request = {} - await client.get_client_tls_policy(request) + await client.get_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_client_tls_policy(request) + await client.get_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1859,9 +1894,9 @@ async def test_get_client_tls_policy_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_client_tls_policy_async( +async def test_get_authorization_policy_async( transport: str = "grpc_asyncio", - request_type=client_tls_policy.GetClientTlsPolicyRequest, + request_type=authorization_policy.GetAuthorizationPolicyRequest, ): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), @@ -1874,53 +1909,53 @@ async def test_get_client_tls_policy_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_client_tls_policy), "__call__" + type(client.transport.get_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - client_tls_policy.ClientTlsPolicy( + authorization_policy.AuthorizationPolicy( name="name_value", description="description_value", - sni="sni_value", + action=authorization_policy.AuthorizationPolicy.Action.ALLOW, ) ) - response = await client.get_client_tls_policy(request) + response = await client.get_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = client_tls_policy.GetClientTlsPolicyRequest() + request = authorization_policy.GetAuthorizationPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, client_tls_policy.ClientTlsPolicy) + assert isinstance(response, authorization_policy.AuthorizationPolicy) assert response.name == "name_value" assert response.description == "description_value" - assert response.sni == "sni_value" + assert response.action == authorization_policy.AuthorizationPolicy.Action.ALLOW @pytest.mark.asyncio -async def test_get_client_tls_policy_async_from_dict(): - await test_get_client_tls_policy_async(request_type=dict) +async def test_get_authorization_policy_async_from_dict(): + await test_get_authorization_policy_async(request_type=dict) -def test_get_client_tls_policy_field_headers(): +def test_get_authorization_policy_field_headers(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = client_tls_policy.GetClientTlsPolicyRequest() + request = authorization_policy.GetAuthorizationPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_client_tls_policy), "__call__" + type(client.transport.get_authorization_policy), "__call__" ) as call: - call.return_value = client_tls_policy.ClientTlsPolicy() - client.get_client_tls_policy(request) + call.return_value = authorization_policy.AuthorizationPolicy() + client.get_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -1936,25 +1971,25 @@ def test_get_client_tls_policy_field_headers(): @pytest.mark.asyncio -async def test_get_client_tls_policy_field_headers_async(): +async def test_get_authorization_policy_field_headers_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = client_tls_policy.GetClientTlsPolicyRequest() + request = authorization_policy.GetAuthorizationPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_client_tls_policy), "__call__" + type(client.transport.get_authorization_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - client_tls_policy.ClientTlsPolicy() + authorization_policy.AuthorizationPolicy() ) - await client.get_client_tls_policy(request) + await client.get_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1969,20 +2004,20 @@ async def test_get_client_tls_policy_field_headers_async(): ) in kw["metadata"] -def test_get_client_tls_policy_flattened(): +def test_get_authorization_policy_flattened(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_client_tls_policy), "__call__" + type(client.transport.get_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = client_tls_policy.ClientTlsPolicy() + call.return_value = authorization_policy.AuthorizationPolicy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_client_tls_policy( + client.get_authorization_policy( name="name_value", ) @@ -1995,7 +2030,7 @@ def test_get_client_tls_policy_flattened(): assert arg == mock_val -def test_get_client_tls_policy_flattened_error(): +def test_get_authorization_policy_flattened_error(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2003,31 +2038,31 @@ def test_get_client_tls_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_client_tls_policy( - client_tls_policy.GetClientTlsPolicyRequest(), + client.get_authorization_policy( + authorization_policy.GetAuthorizationPolicyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_client_tls_policy_flattened_async(): +async def test_get_authorization_policy_flattened_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_client_tls_policy), "__call__" + type(client.transport.get_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = client_tls_policy.ClientTlsPolicy() + call.return_value = authorization_policy.AuthorizationPolicy() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - client_tls_policy.ClientTlsPolicy() + authorization_policy.AuthorizationPolicy() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_client_tls_policy( + response = await client.get_authorization_policy( name="name_value", ) @@ -2041,7 +2076,7 @@ async def test_get_client_tls_policy_flattened_async(): @pytest.mark.asyncio -async def test_get_client_tls_policy_flattened_error_async(): +async def test_get_authorization_policy_flattened_error_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) @@ -2049,8 +2084,8 @@ async def test_get_client_tls_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_client_tls_policy( - client_tls_policy.GetClientTlsPolicyRequest(), + await client.get_authorization_policy( + authorization_policy.GetAuthorizationPolicyRequest(), name="name_value", ) @@ -2058,11 +2093,11 @@ async def test_get_client_tls_policy_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - gcn_client_tls_policy.CreateClientTlsPolicyRequest, + gcn_authorization_policy.CreateAuthorizationPolicyRequest, dict, ], ) -def test_create_client_tls_policy(request_type, transport: str = "grpc"): +def test_create_authorization_policy(request_type, transport: str = "grpc"): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2074,23 +2109,23 @@ def test_create_client_tls_policy(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_client_tls_policy), "__call__" + type(client.transport.create_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_client_tls_policy(request) + response = client.create_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + request = gcn_authorization_policy.CreateAuthorizationPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_create_client_tls_policy_non_empty_request_with_auto_populated_field(): +def test_create_authorization_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = NetworkSecurityClient( @@ -2101,28 +2136,28 @@ def test_create_client_tls_policy_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = gcn_client_tls_policy.CreateClientTlsPolicyRequest( + request = gcn_authorization_policy.CreateAuthorizationPolicyRequest( parent="parent_value", - client_tls_policy_id="client_tls_policy_id_value", + authorization_policy_id="authorization_policy_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_client_tls_policy), "__call__" + type(client.transport.create_authorization_policy), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_client_tls_policy(request=request) + client.create_authorization_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gcn_client_tls_policy.CreateClientTlsPolicyRequest( + assert args[0] == gcn_authorization_policy.CreateAuthorizationPolicyRequest( parent="parent_value", - client_tls_policy_id="client_tls_policy_id_value", + authorization_policy_id="authorization_policy_id_value", ) -def test_create_client_tls_policy_use_cached_wrapped_rpc(): +def test_create_authorization_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2137,7 +2172,7 @@ def test_create_client_tls_policy_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_client_tls_policy + client._transport.create_authorization_policy in client._transport._wrapped_methods ) @@ -2147,10 +2182,10 @@ def test_create_client_tls_policy_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_client_tls_policy + client._transport.create_authorization_policy ] = mock_rpc request = {} - client.create_client_tls_policy(request) + client.create_authorization_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -2160,7 +2195,7 @@ def test_create_client_tls_policy_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_client_tls_policy(request) + client.create_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2168,7 +2203,7 @@ def test_create_client_tls_policy_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_client_tls_policy_async_use_cached_wrapped_rpc( +async def test_create_authorization_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2185,7 +2220,7 @@ async def test_create_client_tls_policy_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_client_tls_policy + client._client._transport.create_authorization_policy in client._client._transport._wrapped_methods ) @@ -2193,11 +2228,11 @@ async def test_create_client_tls_policy_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_client_tls_policy + client._client._transport.create_authorization_policy ] = mock_rpc request = {} - await client.create_client_tls_policy(request) + await client.create_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -2207,7 +2242,7 @@ async def test_create_client_tls_policy_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.create_client_tls_policy(request) + await client.create_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2215,9 +2250,9 @@ async def test_create_client_tls_policy_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_client_tls_policy_async( +async def test_create_authorization_policy_async( transport: str = "grpc_asyncio", - request_type=gcn_client_tls_policy.CreateClientTlsPolicyRequest, + request_type=gcn_authorization_policy.CreateAuthorizationPolicyRequest, ): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), @@ -2230,18 +2265,18 @@ async def test_create_client_tls_policy_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_client_tls_policy), "__call__" + type(client.transport.create_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_client_tls_policy(request) + response = await client.create_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + request = gcn_authorization_policy.CreateAuthorizationPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -2249,27 +2284,27 @@ async def test_create_client_tls_policy_async( @pytest.mark.asyncio -async def test_create_client_tls_policy_async_from_dict(): - await test_create_client_tls_policy_async(request_type=dict) +async def test_create_authorization_policy_async_from_dict(): + await test_create_authorization_policy_async(request_type=dict) -def test_create_client_tls_policy_field_headers(): +def test_create_authorization_policy_field_headers(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + request = gcn_authorization_policy.CreateAuthorizationPolicyRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_client_tls_policy), "__call__" + type(client.transport.create_authorization_policy), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_client_tls_policy(request) + client.create_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2285,25 +2320,25 @@ def test_create_client_tls_policy_field_headers(): @pytest.mark.asyncio -async def test_create_client_tls_policy_field_headers_async(): +async def test_create_authorization_policy_field_headers_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + request = gcn_authorization_policy.CreateAuthorizationPolicyRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_client_tls_policy), "__call__" + type(client.transport.create_authorization_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_client_tls_policy(request) + await client.create_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2318,23 +2353,25 @@ async def test_create_client_tls_policy_field_headers_async(): ) in kw["metadata"] -def test_create_client_tls_policy_flattened(): +def test_create_authorization_policy_flattened(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_client_tls_policy), "__call__" + type(client.transport.create_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.create_client_tls_policy( + client.create_authorization_policy( parent="parent_value", - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), - client_tls_policy_id="client_tls_policy_id_value", + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), + authorization_policy_id="authorization_policy_id_value", ) # Establish that the underlying call was made with the expected @@ -2344,15 +2381,15 @@ def test_create_client_tls_policy_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].client_tls_policy - mock_val = gcn_client_tls_policy.ClientTlsPolicy(name="name_value") + arg = args[0].authorization_policy + mock_val = gcn_authorization_policy.AuthorizationPolicy(name="name_value") assert arg == mock_val - arg = args[0].client_tls_policy_id - mock_val = "client_tls_policy_id_value" + arg = args[0].authorization_policy_id + mock_val = "authorization_policy_id_value" assert arg == mock_val -def test_create_client_tls_policy_flattened_error(): +def test_create_authorization_policy_flattened_error(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2360,23 +2397,25 @@ def test_create_client_tls_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_client_tls_policy( - gcn_client_tls_policy.CreateClientTlsPolicyRequest(), + client.create_authorization_policy( + gcn_authorization_policy.CreateAuthorizationPolicyRequest(), parent="parent_value", - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), - client_tls_policy_id="client_tls_policy_id_value", + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), + authorization_policy_id="authorization_policy_id_value", ) @pytest.mark.asyncio -async def test_create_client_tls_policy_flattened_async(): +async def test_create_authorization_policy_flattened_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_client_tls_policy), "__call__" + type(client.transport.create_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2386,10 +2425,12 @@ async def test_create_client_tls_policy_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_client_tls_policy( + response = await client.create_authorization_policy( parent="parent_value", - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), - client_tls_policy_id="client_tls_policy_id_value", + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), + authorization_policy_id="authorization_policy_id_value", ) # Establish that the underlying call was made with the expected @@ -2399,16 +2440,16 @@ async def test_create_client_tls_policy_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].client_tls_policy - mock_val = gcn_client_tls_policy.ClientTlsPolicy(name="name_value") + arg = args[0].authorization_policy + mock_val = gcn_authorization_policy.AuthorizationPolicy(name="name_value") assert arg == mock_val - arg = args[0].client_tls_policy_id - mock_val = "client_tls_policy_id_value" + arg = args[0].authorization_policy_id + mock_val = "authorization_policy_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_client_tls_policy_flattened_error_async(): +async def test_create_authorization_policy_flattened_error_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) @@ -2416,22 +2457,24 @@ async def test_create_client_tls_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_client_tls_policy( - gcn_client_tls_policy.CreateClientTlsPolicyRequest(), + await client.create_authorization_policy( + gcn_authorization_policy.CreateAuthorizationPolicyRequest(), parent="parent_value", - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), - client_tls_policy_id="client_tls_policy_id_value", + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), + authorization_policy_id="authorization_policy_id_value", ) @pytest.mark.parametrize( "request_type", [ - gcn_client_tls_policy.UpdateClientTlsPolicyRequest, + gcn_authorization_policy.UpdateAuthorizationPolicyRequest, dict, ], ) -def test_update_client_tls_policy(request_type, transport: str = "grpc"): +def test_update_authorization_policy(request_type, transport: str = "grpc"): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2443,23 +2486,23 @@ def test_update_client_tls_policy(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_client_tls_policy), "__call__" + type(client.transport.update_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_client_tls_policy(request) + response = client.update_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + request = gcn_authorization_policy.UpdateAuthorizationPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) -def test_update_client_tls_policy_non_empty_request_with_auto_populated_field(): +def test_update_authorization_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = NetworkSecurityClient( @@ -2470,22 +2513,22 @@ def test_update_client_tls_policy_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + request = gcn_authorization_policy.UpdateAuthorizationPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_client_tls_policy), "__call__" + type(client.transport.update_authorization_policy), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_client_tls_policy(request=request) + client.update_authorization_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + assert args[0] == gcn_authorization_policy.UpdateAuthorizationPolicyRequest() -def test_update_client_tls_policy_use_cached_wrapped_rpc(): +def test_update_authorization_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2500,7 +2543,7 @@ def test_update_client_tls_policy_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_client_tls_policy + client._transport.update_authorization_policy in client._transport._wrapped_methods ) @@ -2510,10 +2553,10 @@ def test_update_client_tls_policy_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_client_tls_policy + client._transport.update_authorization_policy ] = mock_rpc request = {} - client.update_client_tls_policy(request) + client.update_authorization_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -2523,7 +2566,7 @@ def test_update_client_tls_policy_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_client_tls_policy(request) + client.update_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2531,7 +2574,7 @@ def test_update_client_tls_policy_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_client_tls_policy_async_use_cached_wrapped_rpc( +async def test_update_authorization_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2548,7 +2591,7 @@ async def test_update_client_tls_policy_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_client_tls_policy + client._client._transport.update_authorization_policy in client._client._transport._wrapped_methods ) @@ -2556,11 +2599,11 @@ async def test_update_client_tls_policy_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_client_tls_policy + client._client._transport.update_authorization_policy ] = mock_rpc request = {} - await client.update_client_tls_policy(request) + await client.update_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -2570,7 +2613,7 @@ async def test_update_client_tls_policy_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.update_client_tls_policy(request) + await client.update_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2578,9 +2621,9 @@ async def test_update_client_tls_policy_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_client_tls_policy_async( +async def test_update_authorization_policy_async( transport: str = "grpc_asyncio", - request_type=gcn_client_tls_policy.UpdateClientTlsPolicyRequest, + request_type=gcn_authorization_policy.UpdateAuthorizationPolicyRequest, ): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), @@ -2593,18 +2636,18 @@ async def test_update_client_tls_policy_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_client_tls_policy), "__call__" + type(client.transport.update_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_client_tls_policy(request) + response = await client.update_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + request = gcn_authorization_policy.UpdateAuthorizationPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -2612,27 +2655,27 @@ async def test_update_client_tls_policy_async( @pytest.mark.asyncio -async def test_update_client_tls_policy_async_from_dict(): - await test_update_client_tls_policy_async(request_type=dict) +async def test_update_authorization_policy_async_from_dict(): + await test_update_authorization_policy_async(request_type=dict) -def test_update_client_tls_policy_field_headers(): +def test_update_authorization_policy_field_headers(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + request = gcn_authorization_policy.UpdateAuthorizationPolicyRequest() - request.client_tls_policy.name = "name_value" + request.authorization_policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_client_tls_policy), "__call__" + type(client.transport.update_authorization_policy), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_client_tls_policy(request) + client.update_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2643,30 +2686,30 @@ def test_update_client_tls_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "client_tls_policy.name=name_value", + "authorization_policy.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_client_tls_policy_field_headers_async(): +async def test_update_authorization_policy_field_headers_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + request = gcn_authorization_policy.UpdateAuthorizationPolicyRequest() - request.client_tls_policy.name = "name_value" + request.authorization_policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_client_tls_policy), "__call__" + type(client.transport.update_authorization_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_client_tls_policy(request) + await client.update_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2677,25 +2720,27 @@ async def test_update_client_tls_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "client_tls_policy.name=name_value", + "authorization_policy.name=name_value", ) in kw["metadata"] -def test_update_client_tls_policy_flattened(): +def test_update_authorization_policy_flattened(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_client_tls_policy), "__call__" + type(client.transport.update_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_client_tls_policy( - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + client.update_authorization_policy( + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -2703,15 +2748,15 @@ def test_update_client_tls_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].client_tls_policy - mock_val = gcn_client_tls_policy.ClientTlsPolicy(name="name_value") + arg = args[0].authorization_policy + mock_val = gcn_authorization_policy.AuthorizationPolicy(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_update_client_tls_policy_flattened_error(): +def test_update_authorization_policy_flattened_error(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2719,22 +2764,24 @@ def test_update_client_tls_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_client_tls_policy( - gcn_client_tls_policy.UpdateClientTlsPolicyRequest(), - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + client.update_authorization_policy( + gcn_authorization_policy.UpdateAuthorizationPolicyRequest(), + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_update_client_tls_policy_flattened_async(): +async def test_update_authorization_policy_flattened_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_client_tls_policy), "__call__" + type(client.transport.update_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2744,8 +2791,10 @@ async def test_update_client_tls_policy_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_client_tls_policy( - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + response = await client.update_authorization_policy( + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -2753,8 +2802,8 @@ async def test_update_client_tls_policy_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].client_tls_policy - mock_val = gcn_client_tls_policy.ClientTlsPolicy(name="name_value") + arg = args[0].authorization_policy + mock_val = gcn_authorization_policy.AuthorizationPolicy(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -2762,7 +2811,7 @@ async def test_update_client_tls_policy_flattened_async(): @pytest.mark.asyncio -async def test_update_client_tls_policy_flattened_error_async(): +async def test_update_authorization_policy_flattened_error_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) @@ -2770,9 +2819,11 @@ async def test_update_client_tls_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_client_tls_policy( - gcn_client_tls_policy.UpdateClientTlsPolicyRequest(), - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + await client.update_authorization_policy( + gcn_authorization_policy.UpdateAuthorizationPolicyRequest(), + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -2780,11 +2831,11 @@ async def test_update_client_tls_policy_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - client_tls_policy.DeleteClientTlsPolicyRequest, + authorization_policy.DeleteAuthorizationPolicyRequest, dict, ], ) -def test_delete_client_tls_policy(request_type, transport: str = "grpc"): +def test_delete_authorization_policy(request_type, transport: str = "grpc"): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2796,23 +2847,23 @@ def test_delete_client_tls_policy(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_client_tls_policy), "__call__" + type(client.transport.delete_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_client_tls_policy(request) + response = client.delete_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = client_tls_policy.DeleteClientTlsPolicyRequest() + request = authorization_policy.DeleteAuthorizationPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_client_tls_policy_non_empty_request_with_auto_populated_field(): +def test_delete_authorization_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = NetworkSecurityClient( @@ -2823,26 +2874,26 @@ def test_delete_client_tls_policy_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = client_tls_policy.DeleteClientTlsPolicyRequest( + request = authorization_policy.DeleteAuthorizationPolicyRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_client_tls_policy), "__call__" + type(client.transport.delete_authorization_policy), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_client_tls_policy(request=request) + client.delete_authorization_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == client_tls_policy.DeleteClientTlsPolicyRequest( + assert args[0] == authorization_policy.DeleteAuthorizationPolicyRequest( name="name_value", ) -def test_delete_client_tls_policy_use_cached_wrapped_rpc(): +def test_delete_authorization_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2857,7 +2908,7 @@ def test_delete_client_tls_policy_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_client_tls_policy + client._transport.delete_authorization_policy in client._transport._wrapped_methods ) @@ -2867,10 +2918,10 @@ def test_delete_client_tls_policy_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_client_tls_policy + client._transport.delete_authorization_policy ] = mock_rpc request = {} - client.delete_client_tls_policy(request) + client.delete_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -2880,7 +2931,7 @@ def test_delete_client_tls_policy_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_client_tls_policy(request) + client.delete_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2888,7 +2939,7 @@ def test_delete_client_tls_policy_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_client_tls_policy_async_use_cached_wrapped_rpc( +async def test_delete_authorization_policy_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2905,7 +2956,7 @@ async def test_delete_client_tls_policy_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_client_tls_policy + client._client._transport.delete_authorization_policy in client._client._transport._wrapped_methods ) @@ -2913,11 +2964,11 @@ async def test_delete_client_tls_policy_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_client_tls_policy + client._client._transport.delete_authorization_policy ] = mock_rpc request = {} - await client.delete_client_tls_policy(request) + await client.delete_authorization_policy(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -2927,7 +2978,7 @@ async def test_delete_client_tls_policy_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_client_tls_policy(request) + await client.delete_authorization_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2935,9 +2986,9 @@ async def test_delete_client_tls_policy_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_client_tls_policy_async( +async def test_delete_authorization_policy_async( transport: str = "grpc_asyncio", - request_type=client_tls_policy.DeleteClientTlsPolicyRequest, + request_type=authorization_policy.DeleteAuthorizationPolicyRequest, ): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), @@ -2950,18 +3001,18 @@ async def test_delete_client_tls_policy_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_client_tls_policy), "__call__" + type(client.transport.delete_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_client_tls_policy(request) + response = await client.delete_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = client_tls_policy.DeleteClientTlsPolicyRequest() + request = authorization_policy.DeleteAuthorizationPolicyRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -2969,27 +3020,27 @@ async def test_delete_client_tls_policy_async( @pytest.mark.asyncio -async def test_delete_client_tls_policy_async_from_dict(): - await test_delete_client_tls_policy_async(request_type=dict) +async def test_delete_authorization_policy_async_from_dict(): + await test_delete_authorization_policy_async(request_type=dict) -def test_delete_client_tls_policy_field_headers(): +def test_delete_authorization_policy_field_headers(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = client_tls_policy.DeleteClientTlsPolicyRequest() + request = authorization_policy.DeleteAuthorizationPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_client_tls_policy), "__call__" + type(client.transport.delete_authorization_policy), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_client_tls_policy(request) + client.delete_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3005,25 +3056,25 @@ def test_delete_client_tls_policy_field_headers(): @pytest.mark.asyncio -async def test_delete_client_tls_policy_field_headers_async(): +async def test_delete_authorization_policy_field_headers_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = client_tls_policy.DeleteClientTlsPolicyRequest() + request = authorization_policy.DeleteAuthorizationPolicyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_client_tls_policy), "__call__" + type(client.transport.delete_authorization_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_client_tls_policy(request) + await client.delete_authorization_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3038,20 +3089,20 @@ async def test_delete_client_tls_policy_field_headers_async(): ) in kw["metadata"] -def test_delete_client_tls_policy_flattened(): +def test_delete_authorization_policy_flattened(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_client_tls_policy), "__call__" + type(client.transport.delete_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_client_tls_policy( + client.delete_authorization_policy( name="name_value", ) @@ -3064,7 +3115,7 @@ def test_delete_client_tls_policy_flattened(): assert arg == mock_val -def test_delete_client_tls_policy_flattened_error(): +def test_delete_authorization_policy_flattened_error(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3072,21 +3123,21 @@ def test_delete_client_tls_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_client_tls_policy( - client_tls_policy.DeleteClientTlsPolicyRequest(), + client.delete_authorization_policy( + authorization_policy.DeleteAuthorizationPolicyRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_client_tls_policy_flattened_async(): +async def test_delete_authorization_policy_flattened_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_client_tls_policy), "__call__" + type(client.transport.delete_authorization_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -3096,7 +3147,7 @@ async def test_delete_client_tls_policy_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_client_tls_policy( + response = await client.delete_authorization_policy( name="name_value", ) @@ -3110,7 +3161,7 @@ async def test_delete_client_tls_policy_flattened_async(): @pytest.mark.asyncio -async def test_delete_client_tls_policy_flattened_error_async(): +async def test_delete_authorization_policy_flattened_error_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3118,19 +3169,97 @@ async def test_delete_client_tls_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_client_tls_policy( - client_tls_policy.DeleteClientTlsPolicyRequest(), + await client.delete_authorization_policy( + authorization_policy.DeleteAuthorizationPolicyRequest(), name="name_value", ) -def test_list_client_tls_policies_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + backend_authentication_config.ListBackendAuthenticationConfigsRequest, + dict, + ], +) +def test_list_backend_authentication_configs(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = client.list_backend_authentication_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest() + ) + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackendAuthenticationConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backend_authentication_configs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backend_authentication_config.ListBackendAuthenticationConfigsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_backend_authentication_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == backend_authentication_config.ListBackendAuthenticationConfigsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_backend_authentication_configs_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -3139,7 +3268,7 @@ def test_list_client_tls_policies_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_client_tls_policies + client._transport.list_backend_authentication_configs in client._transport._wrapped_methods ) @@ -3149,251 +3278,568 @@ def test_list_client_tls_policies_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_client_tls_policies + client._transport.list_backend_authentication_configs ] = mock_rpc - request = {} - client.list_client_tls_policies(request) + client.list_backend_authentication_configs(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_client_tls_policies(request) + client.list_backend_authentication_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_client_tls_policies_rest_required_fields( - request_type=client_tls_policy.ListClientTlsPoliciesRequest, +@pytest.mark.asyncio +async def test_list_backend_authentication_configs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.NetworkSecurityRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.list_backend_authentication_configs + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_client_tls_policies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backend_authentication_configs + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.list_backend_authentication_configs(request) - jsonified_request["parent"] = 
"parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_client_tls_policies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + await client.list_backend_authentication_configs(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = client_tls_policy.ListClientTlsPoliciesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result +@pytest.mark.asyncio +async def test_list_backend_authentication_configs_async( + transport: str = "grpc_asyncio", + request_type=backend_authentication_config.ListBackendAuthenticationConfigsRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value = Response() - response_value.status_code = 200 + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Convert return value to protobuf type - return_value = client_tls_policy.ListClientTlsPoliciesResponse.pb( - return_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - json_return_value = json_format.MessageToJson(return_value) + ) + response = await client.list_backend_authentication_configs(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest() + ) + assert args[0] == request - response = client.list_client_tls_policies(request) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackendAuthenticationConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_backend_authentication_configs_async_from_dict(): + await test_list_backend_authentication_configs_async(request_type=dict) -def test_list_client_tls_policies_rest_unset_required_fields(): - transport = transports.NetworkSecurityRestTransport( - credentials=ga_credentials.AnonymousCredentials + +def test_list_backend_authentication_configs_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_client_tls_policies._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backend_authentication_config.ListBackendAuthenticationConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + call.return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() ) - & set(("parent",)) + client.list_backend_authentication_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backend_authentication_configs_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backend_authentication_config.ListBackendAuthenticationConfigsRequest() + + request.parent = "parent_value" -def test_list_client_tls_policies_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() + ) + await client.list_backend_authentication_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backend_authentication_configs_flattened(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = client_tls_policy.ListClientTlsPoliciesResponse() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backend_authentication_configs( + parent="parent_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_list_backend_authentication_configs_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backend_authentication_configs( + backend_authentication_config.ListBackendAuthenticationConfigsRequest(), parent="parent_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = client_tls_policy.ListClientTlsPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_client_tls_policies(**mock_args) +@pytest.mark.asyncio +async def test_list_backend_authentication_configs_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backend_authentication_configs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha1/{parent=projects/*/locations/*}/clientTlsPolicies" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_list_client_tls_policies_rest_flattened_error(transport: str = "rest"): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_list_backend_authentication_configs_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_client_tls_policies( - client_tls_policy.ListClientTlsPoliciesRequest(), + await client.list_backend_authentication_configs( + backend_authentication_config.ListBackendAuthenticationConfigsRequest(), parent="parent_value", ) -def test_list_client_tls_policies_rest_pager(transport: str = "rest"): +def test_list_backend_authentication_configs_pager(transport_name: str = "grpc"): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), ], next_page_token="abc", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[], + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[], next_page_token="def", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), ], next_page_token="ghi", ), - client_tls_policy.ListClientTlsPoliciesResponse( - client_tls_policies=[ - client_tls_policy.ClientTlsPolicy(), - client_tls_policy.ClientTlsPolicy(), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), ], ), + RuntimeError, ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple( - client_tls_policy.ListClientTlsPoliciesResponse.to_json(x) for x in response + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backend_authentication_configs( + request={}, retry=retry, timeout=timeout ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_client_tls_policies(request=sample_request) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 - assert all(isinstance(i, client_tls_policy.ClientTlsPolicy) for i in results) + assert all( + isinstance(i, backend_authentication_config.BackendAuthenticationConfig) + for i in results + ) - pages = list(client.list_client_tls_policies(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + +def test_list_backend_authentication_configs_pages(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + ], + next_page_token="abc", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[], + next_page_token="def", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + ], + next_page_token="ghi", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backend_authentication_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_client_tls_policy_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_list_backend_authentication_configs_async_pager(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + ], + next_page_token="abc", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[], + next_page_token="def", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + ], + next_page_token="ghi", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backend_authentication_configs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, backend_authentication_config.BackendAuthenticationConfig) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_backend_authentication_configs_async_pages(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
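The pager tests above stub the transport with four `ListBackendAuthenticationConfigsResponse` pages followed by a trailing `RuntimeError`, a sentinel that fails the test if the pager issues more RPCs than pages were provided. Consuming such a pager looks like the hedged sketch below; each loop assumes its own freshly stubbed (or real) transport, mirroring the separate pager and pages tests.

    # Flat iteration: the pager fetches follow-up pages transparently and
    # yields individual BackendAuthenticationConfig resources.
    for config in client.list_backend_authentication_configs(request={}):
        print(config.name)

    # Page-wise iteration: each page exposes the raw response, including its
    # next_page_token, via .raw_page (the final token is the empty string).
    for page in client.list_backend_authentication_configs(request={}).pages:
        print(page.raw_page.next_page_token)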
+ call.side_effect = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + ], + next_page_token="abc", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[], + next_page_token="def", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + ], + next_page_token="ghi", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backend_authentication_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backend_authentication_config.GetBackendAuthenticationConfigRequest, + dict, + ], +) +def test_get_backend_authentication_config(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backend_authentication_config.BackendAuthenticationConfig( + name="name_value", + description="description_value", + client_certificate="client_certificate_value", + trust_config="trust_config_value", + well_known_roots=backend_authentication_config.BackendAuthenticationConfig.WellKnownRoots.NONE, + etag="etag_value", + ) + response = client.get_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backend_authentication_config.GetBackendAuthenticationConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance( + response, backend_authentication_config.BackendAuthenticationConfig + ) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.client_certificate == "client_certificate_value" + assert response.trust_config == "trust_config_value" + assert ( + response.well_known_roots + == backend_authentication_config.BackendAuthenticationConfig.WellKnownRoots.NONE + ) + assert response.etag == "etag_value" + + +def test_get_backend_authentication_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backend_authentication_config.GetBackendAuthenticationConfigRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backend_authentication_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backend_authentication_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == backend_authentication_config.GetBackendAuthenticationConfigRequest( + name="name_value", + ) + + +def test_get_backend_authentication_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -3402,7 +3848,7 @@ def test_get_client_tls_policy_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_client_tls_policy + client._transport.get_backend_authentication_config in client._transport._wrapped_methods ) @@ -3412,173 +3858,351 @@ def test_get_client_tls_policy_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_client_tls_policy + client._transport.get_backend_authentication_config ] = mock_rpc - request = {} - client.get_client_tls_policy(request) + client.get_backend_authentication_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_client_tls_policy(request) + client.get_backend_authentication_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_client_tls_policy_rest_required_fields( - request_type=client_tls_policy.GetClientTlsPolicyRequest, +@pytest.mark.asyncio +async def test_get_backend_authentication_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.NetworkSecurityRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backend_authentication_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backend_authentication_config + ] = mock_rpc + + request = {} + await client.get_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backend_authentication_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backend_authentication_config_async( + transport: str = "grpc_asyncio", + request_type=backend_authentication_config.GetBackendAuthenticationConfigRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_client_tls_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backend_authentication_config.BackendAuthenticationConfig( + name="name_value", + description="description_value", + client_certificate="client_certificate_value", + trust_config="trust_config_value", + well_known_roots=backend_authentication_config.BackendAuthenticationConfig.WellKnownRoots.NONE, + etag="etag_value", + ) + ) + response = await client.get_backend_authentication_config(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backend_authentication_config.GetBackendAuthenticationConfigRequest() + assert args[0] == request - jsonified_request["name"] = "name_value" + # Establish that the response is the type that we expect. + assert isinstance( + response, backend_authentication_config.BackendAuthenticationConfig + ) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.client_certificate == "client_certificate_value" + assert response.trust_config == "trust_config_value" + assert ( + response.well_known_roots + == backend_authentication_config.BackendAuthenticationConfig.WellKnownRoots.NONE + ) + assert response.etag == "etag_value" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_client_tls_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_get_backend_authentication_config_async_from_dict(): + await test_get_backend_authentication_config_async(request_type=dict) + +def test_get_backend_authentication_config_field_headers(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = client_tls_policy.ClientTlsPolicy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backend_authentication_config.GetBackendAuthenticationConfigRequest() - # Convert return value to protobuf type - return_value = client_tls_policy.ClientTlsPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backend_authentication_config), "__call__" + ) as call: + call.return_value = backend_authentication_config.BackendAuthenticationConfig() + client.get_backend_authentication_config(request) - response = client.get_client_tls_policy(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_get_client_tls_policy_rest_unset_required_fields(): - transport = transports.NetworkSecurityRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_backend_authentication_config_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.get_client_tls_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backend_authentication_config.GetBackendAuthenticationConfigRequest() + request.name = "name_value" -def test_get_client_tls_policy_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backend_authentication_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backend_authentication_config.BackendAuthenticationConfig() + ) + await client.get_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backend_authentication_config_flattened(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = client_tls_policy.ClientTlsPolicy() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backend_authentication_config.BackendAuthenticationConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backend_authentication_config( + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" - } + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_get_backend_authentication_config_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backend_authentication_config( + backend_authentication_config.GetBackendAuthenticationConfigRequest(), name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = client_tls_policy.ClientTlsPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_client_tls_policy(**mock_args) +@pytest.mark.asyncio +async def test_get_backend_authentication_config_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backend_authentication_config.BackendAuthenticationConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backend_authentication_config.BackendAuthenticationConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backend_authentication_config( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha1/{name=projects/*/locations/*/clientTlsPolicies/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_get_client_tls_policy_rest_flattened_error(transport: str = "rest"): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_get_backend_authentication_config_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_client_tls_policy( - client_tls_policy.GetClientTlsPolicyRequest(), + await client.get_backend_authentication_config( + backend_authentication_config.GetBackendAuthenticationConfigRequest(), name="name_value", ) -def test_create_client_tls_policy_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, + dict, + ], +) +def test_create_backend_authentication_config(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
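As the `*_flattened_error` tests above demonstrate, each method accepts either a request object or flattened keyword fields, never both; mixing them raises `ValueError` before any RPC is attempted. A brief sketch of the two valid calling conventions, using the same placeholder value the tests use:

    # Request-object form.
    client.get_backend_authentication_config(
        request=backend_authentication_config.GetBackendAuthenticationConfigRequest(
            name="name_value",
        )
    )

    # Flattened form.
    client.get_backend_authentication_config(name="name_value")

    # Supplying both a request object and flattened fields raises ValueError.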
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = ( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest() + ) + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backend_authentication_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = ( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest( + parent="parent_value", + backend_authentication_config_id="backend_authentication_config_id_value", + ) + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backend_authentication_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backend_authentication_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest( + parent="parent_value", + backend_authentication_config_id="backend_authentication_config_id_value", + ) + + +def test_create_backend_authentication_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -3587,7 +4211,7 @@ def test_create_client_tls_policy_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_client_tls_policy + client._transport.create_backend_authentication_config in client._transport._wrapped_methods ) @@ -3597,387 +4221,376 @@ def test_create_client_tls_policy_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_client_tls_policy + client._transport.create_backend_authentication_config ] = mock_rpc - request = {} - client.create_client_tls_policy(request) + client.create_backend_authentication_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
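The create test above stubs the transport to return an `operations_pb2.Operation`, which the client wraps as a long-running-operation future (hence the `isinstance(response, future.Future)` assertion). Against a real service the caller would block on the future; the sketch below is illustrative only, with a placeholder parent and config id rather than values taken from this patch, and reuses the flattened fields the flattened tests in this module exercise.

    # Placeholder parent and id; not part of the generated tests.
    operation = client.create_backend_authentication_config(
        parent="projects/my-project/locations/global",
        backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig(
            name="name_value",
        ),
        backend_authentication_config_id="my-backend-auth-config",
    )
    result = operation.result()  # blocks until the long-running operation completes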
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_client_tls_policy(request) + client.create_backend_authentication_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_client_tls_policy_rest_required_fields( - request_type=gcn_client_tls_policy.CreateClientTlsPolicyRequest, +@pytest.mark.asyncio +async def test_create_backend_authentication_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.NetworkSecurityRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request_init["client_tls_policy_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped - assert "clientTlsPolicyId" not in jsonified_request + # Ensure method has been cached + assert ( + client._client._transport.create_backend_authentication_config + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_client_tls_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backend_authentication_config + ] = mock_rpc - # verify required fields with default values are now present - assert "clientTlsPolicyId" in jsonified_request - assert ( - jsonified_request["clientTlsPolicyId"] == request_init["client_tls_policy_id"] + request = {} + await client.create_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backend_authentication_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backend_authentication_config_async( + transport: str = "grpc_asyncio", + request_type=gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - jsonified_request["parent"] = "parent_value" - jsonified_request["clientTlsPolicyId"] = "client_tls_policy_id_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_client_tls_policy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("client_tls_policy_id",)) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = ( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest() + ) + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backend_authentication_config_async_from_dict(): + await test_create_backend_authentication_config_async(request_type=dict) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "clientTlsPolicyId" in jsonified_request - assert jsonified_request["clientTlsPolicyId"] == "client_tls_policy_id_value" +def test_create_backend_authentication_config_field_headers(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest() + ) - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_backend_authentication_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backend_authentication_config(request) - response = client.create_client_tls_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [ - ( - "clientTlsPolicyId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_create_client_tls_policy_rest_unset_required_fields(): - transport = transports.NetworkSecurityRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_create_backend_authentication_config_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.create_client_tls_policy._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("clientTlsPolicyId",)) - & set( - ( - "parent", - "clientTlsPolicyId", - "clientTlsPolicy", - ) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest() ) + request.parent = "parent_value" -def test_create_client_tls_policy_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backend_authentication_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backend_authentication_config_flattened(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_backend_authentication_config( parent="parent_value", - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), - client_tls_policy_id="client_tls_policy_id_value", + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + backend_authentication_config_id="backend_authentication_config_id_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_client_tls_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha1/{parent=projects/*/locations/*}/clientTlsPolicies" - % client.transport._host, - args[1], + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backend_authentication_config + mock_val = gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" ) + assert arg == mock_val + arg = args[0].backend_authentication_config_id + mock_val = "backend_authentication_config_id_value" + assert arg == mock_val -def test_create_client_tls_policy_rest_flattened_error(transport: str = "rest"): +def test_create_backend_authentication_config_flattened_error(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_client_tls_policy( - gcn_client_tls_policy.CreateClientTlsPolicyRequest(), + client.create_backend_authentication_config( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest(), parent="parent_value", - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), - client_tls_policy_id="client_tls_policy_id_value", + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + backend_authentication_config_id="backend_authentication_config_id_value", ) -def test_update_client_tls_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.asyncio +async def test_create_backend_authentication_config_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") - # Ensure method has been cached - assert ( - client._transport.update_client_tls_policy - in client._transport._wrapped_methods + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backend_authentication_config( + parent="parent_value", + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + backend_authentication_config_id="backend_authentication_config_id_value", ) - client._transport._wrapped_methods[ - client._transport.update_client_tls_policy - ] = mock_rpc - - request = {} - client.update_client_tls_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_client_tls_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backend_authentication_config + mock_val = gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ) + assert arg == mock_val + arg = args[0].backend_authentication_config_id + mock_val = "backend_authentication_config_id_value" + assert arg == mock_val -def test_update_client_tls_policy_rest_required_fields( - request_type=gcn_client_tls_policy.UpdateClientTlsPolicyRequest, -): - transport_class = transports.NetworkSecurityRestTransport - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_create_backend_authentication_config_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_client_tls_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_client_tls_policy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_backend_authentication_config( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest(), + parent="parent_value", + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + backend_authentication_config_id="backend_authentication_config_id_value", + ) - # verify required fields with non-default values are left alone +@pytest.mark.parametrize( + "request_type", + [ + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, + dict, + ], +) +def test_update_backend_authentication_config(request_type, transport: str = "grpc"): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_client_tls_policy(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backend_authentication_config(request) -def test_update_client_tls_policy_rest_unset_required_fields(): - transport = transports.NetworkSecurityRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = ( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() + ) + assert args[0] == request - unset_fields = transport.update_client_tls_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("clientTlsPolicy",))) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) -def test_update_client_tls_policy_rest_flattened(): +def test_update_backend_authentication_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "client_tls_policy": { - "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_client_tls_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha1/{client_tls_policy.name=projects/*/locations/*/clientTlsPolicies/*}" - % client.transport._host, - args[1], - ) - - -def test_update_client_tls_policy_rest_flattened_error(transport: str = "rest"): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = ( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_client_tls_policy( - gcn_client_tls_policy.UpdateClientTlsPolicyRequest(), - client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backend_authentication_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_backend_authentication_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() ) -def test_delete_client_tls_policy_rest_use_cached_wrapped_rpc(): +def test_update_backend_authentication_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -3986,7 +4599,7 @@ def test_delete_client_tls_policy_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_client_tls_policy + client._transport.update_backend_authentication_config in client._transport._wrapped_methods ) @@ -3996,1970 +4609,35494 @@ def test_delete_client_tls_policy_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_client_tls_policy + client._transport.update_backend_authentication_config ] = mock_rpc - request = {} - client.delete_client_tls_policy(request) + client.update_backend_authentication_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_client_tls_policy(request) + client.update_backend_authentication_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_client_tls_policy_rest_required_fields( - request_type=client_tls_policy.DeleteClientTlsPolicyRequest, +@pytest.mark.asyncio +async def test_update_backend_authentication_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.NetworkSecurityRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.update_backend_authentication_config + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_client_tls_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + 
mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backend_authentication_config + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.update_backend_authentication_config(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_client_tls_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + await client.update_backend_authentication_config(request) - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_update_backend_authentication_config_async( + transport: str = "grpc_asyncio", + request_type=gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.delete_client_tls_policy(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backend_authentication_config(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = ( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() + ) + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_delete_client_tls_policy_rest_unset_required_fields(): - transport = transports.NetworkSecurityRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.delete_client_tls_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) +@pytest.mark.asyncio +async def test_update_backend_authentication_config_async_from_dict(): + await test_update_backend_authentication_config_async(request_type=dict) -def test_delete_client_tls_policy_rest_flattened(): +def test_update_backend_authentication_config_field_headers(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" - } + request.backend_authentication_config.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backend_authentication_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backend_authentication_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backend_authentication_config_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = ( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() + ) + + request.backend_authentication_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backend_authentication_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) - mock_args.update(sample_request) + await client.update_backend_authentication_config(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.delete_client_tls_policy(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backend_authentication_config.name=name_value", + ) in kw["metadata"] + + +def test_update_backend_authentication_config_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backend_authentication_config( + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha1/{name=projects/*/locations/*/clientTlsPolicies/*}" - % client.transport._host, - args[1], + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backend_authentication_config + mock_val = gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_delete_client_tls_policy_rest_flattened_error(transport: str = "rest"): +def test_update_backend_authentication_config_flattened_error(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_client_tls_policy( - client_tls_policy.DeleteClientTlsPolicyRequest(), - name="name_value", + client.update_backend_authentication_config( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest(), + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
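The `*_field_headers` tests check that fields bound into the HTTP/1.1 URI are mirrored into an `x-goog-request-params` metadata entry. A minimal sketch of that check using only `unittest.mock`; `DemoClient` and `send` are illustrative stand-ins for the generated client and its gRPC stub, not the real surface:

from unittest import mock


class DemoClient:
    def send(self, request, metadata=()):
        # Pretend gRPC stub call; patched out in the check below.
        raise NotImplementedError

    def update_config(self, request):
        # Copy the URI-bound field into the routing-parameters header.
        routing = "backend_authentication_config.name=" + request["name"]
        return self.send(request, metadata=(("x-goog-request-params", routing),))


client = DemoClient()
with mock.patch.object(DemoClient, "send") as call:
    client.update_config({"name": "name_value"})

    # Same shape of assertion as the generated field-header tests.
    _, kwargs = call.call_args
    assert (
        "x-goog-request-params",
        "backend_authentication_config.name=name_value",
    ) in kwargs["metadata"]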
- transport = transports.NetworkSecurityGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +@pytest.mark.asyncio +async def test_update_backend_authentication_config_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), ) - with pytest.raises(ValueError): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.NetworkSecurityGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = NetworkSecurityClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") - # It is an error to provide an api_key and a transport instance. - transport = transports.NetworkSecurityGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = NetworkSecurityClient( - client_options=options, - transport=transport, + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_backend_authentication_config( + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = NetworkSecurityClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backend_authentication_config + mock_val = gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val - # It is an error to provide scopes and a transport instance. - transport = transports.NetworkSecurityGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + +@pytest.mark.asyncio +async def test_update_backend_authentication_config_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
with pytest.raises(ValueError): - client = NetworkSecurityClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + await client.update_backend_authentication_config( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest(), + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.NetworkSecurityGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + backend_authentication_config.DeleteBackendAuthenticationConfigRequest, + dict, + ], +) +def test_delete_backend_authentication_config(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = NetworkSecurityClient(transport=transport) - assert client.transport is transport + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.NetworkSecurityGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.NetworkSecurityGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.NetworkSecurityGrpcTransport, - transports.NetworkSecurityGrpcAsyncIOTransport, - transports.NetworkSecurityRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -def test_transport_kind_grpc(): - transport = NetworkSecurityClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_client_tls_policies_empty_call_grpc(): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.delete_backend_authentication_config), "__call__" ) as call: - call.return_value = client_tls_policy.ListClientTlsPoliciesResponse() - client.list_client_tls_policies(request=None) + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backend_authentication_config(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = client_tls_policy.ListClientTlsPoliciesRequest() + request = ( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest() + ) + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_client_tls_policy_empty_call_grpc(): +def test_delete_backend_authentication_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) - # Mock the actual call, and fake the request. + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backend_authentication_config.DeleteBackendAuthenticationConfigRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_client_tls_policy), "__call__" + type(client.transport.delete_backend_authentication_config), "__call__" ) as call: - call.return_value = client_tls_policy.ClientTlsPolicy() - client.get_client_tls_policy(request=None) - - # Establish that the underlying stub method was called. + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backend_authentication_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = client_tls_policy.GetClientTlsPolicyRequest() - - assert args[0] == request_msg + assert args[ + 0 + ] == backend_authentication_config.DeleteBackendAuthenticationConfigRequest( + name="name_value", + etag="etag_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_client_tls_policy_empty_call_grpc(): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) +def test_delete_backend_authentication_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_client_tls_policy), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_client_tls_policy(request=None) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Establish that the underlying stub method was called. 
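The `*_non_empty_request_with_auto_populated_field` tests guard the AIP-4235 behaviour referenced in their comments: UUID4-annotated fields (typically `request_id`) are filled in automatically when left unset, while explicitly set non-UUID4 fields such as `name` and `etag` are preserved. A minimal sketch under the assumption of a dict-shaped request (real requests are proto-plus messages):

import uuid


def auto_populate_request_id(request):
    # Only populate when the caller left the field empty, so explicit values
    # and the other fields of a non-empty request pass through untouched.
    if not request.get("request_id"):
        request["request_id"] = str(uuid.uuid4())
    return request


req = auto_populate_request_id({"name": "name_value", "etag": "etag_value"})
assert req["name"] == "name_value" and req["etag"] == "etag_value"
uuid.UUID(req["request_id"])  # raises ValueError if it is not a valid UUID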
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + # Ensure method has been cached + assert ( + client._transport.delete_backend_authentication_config + in client._transport._wrapped_methods + ) - assert args[0] == request_msg + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backend_authentication_config + ] = mock_rpc + request = {} + client.delete_backend_authentication_config(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_client_tls_policy_empty_call_grpc(): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_client_tls_policy), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_client_tls_policy(request=None) + client.delete_backend_authentication_config(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +@pytest.mark.asyncio +async def test_delete_backend_authentication_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_client_tls_policy_empty_call_grpc(): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_tls_policy), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_client_tls_policy(request=None) + # Ensure method has been cached + assert ( + client._client._transport.delete_backend_authentication_config + in client._client._transport._wrapped_methods + ) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_tls_policy.DeleteClientTlsPolicyRequest() + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backend_authentication_config + ] = mock_rpc - assert args[0] == request_msg + request = {} + await client.delete_backend_authentication_config(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_transport_kind_grpc_asyncio(): - transport = NetworkSecurityAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + await client.delete_backend_authentication_config(request) -def test_initialize_client_w_grpc_asyncio(): - client = NetworkSecurityAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - assert client is not None + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_list_client_tls_policies_empty_call_grpc_asyncio(): +async def test_delete_backend_authentication_config_async( + transport: str = "grpc_asyncio", + request_type=backend_authentication_config.DeleteBackendAuthenticationConfigRequest, +): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + transport=transport, ) - # Mock the actual call, and fake the request. + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.delete_backend_authentication_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - client_tls_policy.ListClientTlsPoliciesResponse( - next_page_token="next_page_token_value", - ) + operations_pb2.Operation(name="operations/spam") ) - await client.list_client_tls_policies(request=None) + response = await client.delete_backend_authentication_config(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = client_tls_policy.ListClientTlsPoliciesRequest() + request = ( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest() + ) + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio -async def test_get_client_tls_policy_empty_call_grpc_asyncio(): - client = NetworkSecurityAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) +async def test_delete_backend_authentication_config_async_from_dict(): + await test_delete_backend_authentication_config_async(request_type=dict) - # Mock the actual call, and fake the request. + +def test_delete_backend_authentication_config_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backend_authentication_config.DeleteBackendAuthenticationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_client_tls_policy), "__call__" + type(client.transport.delete_backend_authentication_config), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - client_tls_policy.ClientTlsPolicy( - name="name_value", - description="description_value", - sni="sni_value", - ) - ) - await client.get_client_tls_policy(request=None) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backend_authentication_config(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = client_tls_policy.GetClientTlsPolicyRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_create_client_tls_policy_empty_call_grpc_asyncio(): +async def test_delete_backend_authentication_config_field_headers_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backend_authentication_config.DeleteBackendAuthenticationConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_client_tls_policy), "__call__" + type(client.transport.delete_backend_authentication_config), "__call__" ) as call: - # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + operations_pb2.Operation(name="operations/op") ) - await client.create_client_tls_policy(request=None) + await client.delete_backend_authentication_config(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_client_tls_policy_empty_call_grpc_asyncio(): - client = NetworkSecurityAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_backend_authentication_config_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the actual call, and fake the request. + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_client_tls_policy), "__call__" + type(client.transport.delete_backend_authentication_config), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backend_authentication_config( + name="name_value", ) - await client.update_client_tls_policy(request=None) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - assert args[0] == request_msg + +def test_delete_backend_authentication_config_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backend_authentication_config( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest(), + name="name_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_delete_client_tls_policy_empty_call_grpc_asyncio(): +async def test_delete_backend_authentication_config_flattened_async(): client = NetworkSecurityAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_client_tls_policy), "__call__" + type(client.transport.delete_backend_authentication_config), "__call__" ) as call: # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - await client.delete_client_tls_policy(request=None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_backend_authentication_config( + name="name_value", + ) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = client_tls_policy.DeleteClientTlsPolicyRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = NetworkSecurityClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_client_tls_policies_rest_bad_request( - request_type=client_tls_policy.ListClientTlsPoliciesRequest, -): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" +@pytest.mark.asyncio +async def test_delete_backend_authentication_config_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_client_tls_policies(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backend_authentication_config( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest(), + name="name_value", + ) @pytest.mark.parametrize( "request_type", [ - client_tls_policy.ListClientTlsPoliciesRequest, + server_tls_policy.ListServerTlsPoliciesRequest, dict, ], ) -def test_list_client_tls_policies_rest_call_success(request_type): +def test_list_server_tls_policies(request_type, transport: str = "grpc"): client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = client_tls_policy.ListClientTlsPoliciesResponse( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
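The `*_flattened_error` tests encode the GAPIC calling convention that a method accepts either a full request object or individual flattened fields, never both. A minimal sketch of that rule; `delete_config` is an illustrative stand-in, not the generated method:

def delete_config(request=None, *, name=None):
    # Mixing a request object with flattened fields is rejected up front.
    if request is not None and name is not None:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    request = dict(request or {})
    if name is not None:
        request["name"] = name
    return request


assert delete_config(name="name_value") == {"name": "name_value"}
assert delete_config(request={"name": "name_value"}) == {"name": "name_value"}
try:
    delete_config(request={"name": "x"}, name="name_value")
except ValueError:
    pass
else:
    raise AssertionError("expected ValueError when mixing request and flattened args")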
+ call.return_value = server_tls_policy.ListServerTlsPoliciesResponse( next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) + response = client.list_server_tls_policies(request) - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = client_tls_policy.ListClientTlsPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_client_tls_policies(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = server_tls_policy.ListServerTlsPoliciesRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListClientTlsPoliciesPager) + assert isinstance(response, pagers.ListServerTlsPoliciesPager) assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_client_tls_policies_rest_interceptors(null_interceptor): - transport = transports.NetworkSecurityRestTransport( +def test_list_server_tls_policies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NetworkSecurityRestInterceptor(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = server_tls_policy.ListServerTlsPoliciesRequest( + parent="parent_value", + page_token="page_token_value", ) - client = NetworkSecurityClient(transport=transport) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.NetworkSecurityRestInterceptor, "post_list_client_tls_policies" - ) as post, mock.patch.object( - transports.NetworkSecurityRestInterceptor, - "post_list_client_tls_policies_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.NetworkSecurityRestInterceptor, "pre_list_client_tls_policies" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = client_tls_policy.ListClientTlsPoliciesRequest.pb( - client_tls_policy.ListClientTlsPoliciesRequest() + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = client_tls_policy.ListClientTlsPoliciesResponse.to_json( - client_tls_policy.ListClientTlsPoliciesResponse() + client.list_server_tls_policies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == server_tls_policy.ListServerTlsPoliciesRequest( + parent="parent_value", + page_token="page_token_value", ) - req.return_value.content = return_value - request = client_tls_policy.ListClientTlsPoliciesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = client_tls_policy.ListClientTlsPoliciesResponse() - post_with_metadata.return_value = ( - client_tls_policy.ListClientTlsPoliciesResponse(), - metadata, + +def test_list_server_tls_policies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - client.list_client_tls_policies( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_server_tls_policies + in client._transport._wrapped_methods ) - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_server_tls_policies + ] = mock_rpc + request = {} + client.list_server_tls_policies(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_get_client_tls_policy_rest_bad_request( - request_type=client_tls_policy.GetClientTlsPolicyRequest, -): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" - } - request = request_type(**request_init) + client.list_server_tls_policies(request) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_client_tls_policy(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -@pytest.mark.parametrize( - "request_type", - [ - client_tls_policy.GetClientTlsPolicyRequest, - dict, - ], -) -def test_get_client_tls_policy_rest_call_success(request_type): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +@pytest.mark.asyncio +async def test_list_server_tls_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" - } - request = request_type(**request_init) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = client_tls_policy.ClientTlsPolicy( - name="name_value", - description="description_value", - sni="sni_value", + # Ensure method has been cached + assert ( + client._client._transport.list_server_tls_policies + in client._client._transport._wrapped_methods ) - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_server_tls_policies + ] = mock_rpc - # Convert return value to protobuf type - return_value = client_tls_policy.ClientTlsPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_client_tls_policy(request) + request = {} + await client.list_server_tls_policies(request) - # Establish that the response is the type that we expect. - assert isinstance(response, client_tls_policy.ClientTlsPolicy) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.sni == "sni_value" + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + await client.list_server_tls_policies(request) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_client_tls_policy_rest_interceptors(null_interceptor): - transport = transports.NetworkSecurityRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NetworkSecurityRestInterceptor(), + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_server_tls_policies_async( + transport: str = "grpc_asyncio", + request_type=server_tls_policy.ListServerTlsPoliciesRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - client = NetworkSecurityClient(transport=transport) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.NetworkSecurityRestInterceptor, "post_get_client_tls_policy" - ) as post, mock.patch.object( - transports.NetworkSecurityRestInterceptor, - "post_get_client_tls_policy_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.NetworkSecurityRestInterceptor, "pre_get_client_tls_policy" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = client_tls_policy.GetClientTlsPolicyRequest.pb( - client_tls_policy.GetClientTlsPolicyRequest() + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + server_tls_policy.ListServerTlsPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + response = await client.list_server_tls_policies(request) - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = client_tls_policy.ClientTlsPolicy.to_json( - client_tls_policy.ClientTlsPolicy() - ) - req.return_value.content = return_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = server_tls_policy.ListServerTlsPoliciesRequest() + assert args[0] == request - request = client_tls_policy.GetClientTlsPolicyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = client_tls_policy.ClientTlsPolicy() - post_with_metadata.return_value = client_tls_policy.ClientTlsPolicy(), metadata + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListServerTlsPoliciesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] - client.get_client_tls_policy( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() +@pytest.mark.asyncio +async def test_list_server_tls_policies_async_from_dict(): + await test_list_server_tls_policies_async(request_type=dict) -def test_create_client_tls_policy_rest_bad_request( - request_type=gcn_client_tls_policy.CreateClientTlsPolicyRequest, -): +def test_list_server_tls_policies_field_headers(): client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_client_tls_policy(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = server_tls_policy.ListServerTlsPoliciesRequest() + request.parent = "parent_value" -@pytest.mark.parametrize( - "request_type", - [ - gcn_client_tls_policy.CreateClientTlsPolicyRequest, - dict, - ], -) -def test_create_client_tls_policy_rest_call_success(request_type): - client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + call.return_value = server_tls_policy.ListServerTlsPoliciesResponse() + client.list_server_tls_policies(request) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["client_tls_policy"] = { - "name": "name_value", - "description": "description_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "sni": "sni_value", - "client_certificate": { - "local_filepath": { - "certificate_path": "certificate_path_value", - "private_key_path": "private_key_path_value", - }, - "grpc_endpoint": {"target_uri": "target_uri_value"}, - "certificate_provider_instance": { - "plugin_instance": "plugin_instance_value" - }, - }, - "server_validation_ca": [ - { - "ca_cert_path": "ca_cert_path_value", - "grpc_endpoint": {}, - "certificate_provider_instance": {}, - } - ], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Determine if the message type is proto-plus or protobuf - test_field = gcn_client_tls_policy.CreateClientTlsPolicyRequest.meta.fields[ - "client_tls_policy" - ] + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") +@pytest.mark.asyncio +async def test_list_server_tls_policies_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = server_tls_policy.ListServerTlsPoliciesRequest() - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + request.parent = "parent_value" - subfields_not_in_runtime = [] + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + server_tls_policy.ListServerTlsPoliciesResponse() + ) + await client.list_server_tls_policies(request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["client_tls_policy"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["client_tls_policy"][field])): - del request_init["client_tls_policy"][field][i][subfield] - else: - del request_init["client_tls_policy"][field][subfield] - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") +def test_list_server_tls_policies_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_client_tls_policy(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = server_tls_policy.ListServerTlsPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_server_tls_policies( + parent="parent_value", + ) - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_client_tls_policy_rest_interceptors(null_interceptor): - transport = transports.NetworkSecurityRestTransport( +def test_list_server_tls_policies_flattened_error(): + client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NetworkSecurityRestInterceptor(), ) - client = NetworkSecurityClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetworkSecurityRestInterceptor, "post_create_client_tls_policy" - ) as post, mock.patch.object( - transports.NetworkSecurityRestInterceptor, - "post_create_client_tls_policy_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.NetworkSecurityRestInterceptor, "pre_create_client_tls_policy" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcn_client_tls_policy.CreateClientTlsPolicyRequest.pb( - gcn_client_tls_policy.CreateClientTlsPolicyRequest() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_server_tls_policies( + server_tls_policy.ListServerTlsPoliciesRequest(), + parent="parent_value", ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - request = gcn_client_tls_policy.CreateClientTlsPolicyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata +@pytest.mark.asyncio +async def test_list_server_tls_policies_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) - client.create_client_tls_policy( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = server_tls_policy.ListServerTlsPoliciesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + server_tls_policy.ListServerTlsPoliciesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_server_tls_policies( + parent="parent_value", ) - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + # Establish that the underlying call was made with the expected + # request object values. 
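The *_flattened and *_flattened_error pairs above pin down a GAPIC calling convention: each method accepts either a complete request object or the individual "flattened" keyword arguments, and passing both at once is rejected before anything is sent. A hedged usage sketch (import path assumed as above; the ValueError check runs entirely offline, while the commented-out call forms would need real credentials):

    import pytest
    from google.auth import credentials as ga_credentials
    from google.cloud import network_security_v1alpha1  # assumed import path

    client = network_security_v1alpha1.NetworkSecurityClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = network_security_v1alpha1.ListServerTlsPoliciesRequest(
        parent="projects/my-project/locations/us-central1",  # placeholder resource name
    )

    # Either form issues the same RPC:
    #   client.list_server_tls_policies(request=request)
    #   client.list_server_tls_policies(parent=request.parent)
    # Supplying both is an error, which is exactly what the *_flattened_error tests assert:
    with pytest.raises(ValueError):
        client.list_server_tls_policies(request, parent=request.parent)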
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_update_client_tls_policy_rest_bad_request( - request_type=gcn_client_tls_policy.UpdateClientTlsPolicyRequest, -): +@pytest.mark.asyncio +async def test_list_server_tls_policies_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_server_tls_policies( + server_tls_policy.ListServerTlsPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_server_tls_policies_pager(transport_name: str = "grpc"): client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - # send a request that will satisfy transcoding - request_init = { - "client_tls_policy": { - "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" - } - } - request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + ], + next_page_token="abc", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[], + next_page_token="def", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + ], + next_page_token="ghi", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_server_tls_policies( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, server_tls_policy.ServerTlsPolicy) for i in results) + + +def test_list_server_tls_policies_pages(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + ], + next_page_token="abc", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[], + next_page_token="def", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + ], + next_page_token="ghi", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_server_tls_policies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_server_tls_policies_async_pager(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + ], + next_page_token="abc", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[], + next_page_token="def", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + ], + next_page_token="ghi", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_server_tls_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, server_tls_policy.ServerTlsPolicy) for i in responses) + + +@pytest.mark.asyncio +async def test_list_server_tls_policies_async_pages(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + ], + next_page_token="abc", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[], + next_page_token="def", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + ], + next_page_token="ghi", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_server_tls_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + server_tls_policy.GetServerTlsPolicyRequest, + dict, + ], +) +def test_get_server_tls_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = server_tls_policy.ServerTlsPolicy( + name="name_value", + description="description_value", + allow_open=True, + ) + response = client.get_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = server_tls_policy.GetServerTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, server_tls_policy.ServerTlsPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.allow_open is True + + +def test_get_server_tls_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = server_tls_policy.GetServerTlsPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_tls_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
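The pager tests above fake four pages of ServerTlsPolicy results and check that iteration stitches them together; in normal use the pager hides page tokens completely. A sketch of both iteration styles (import path assumed as above; running it for real needs application default credentials and an existing project, and the parent value is a placeholder):

    from google.cloud import network_security_v1alpha1  # assumed import path

    client = network_security_v1alpha1.NetworkSecurityClient()
    parent = "projects/my-project/locations/us-central1"  # placeholder

    # Item-by-item: the pager fetches the next page lazily as the current one is
    # exhausted, which is how the six policies above come back as one stream.
    for policy in client.list_server_tls_policies(parent=parent):
        print(policy.name)

    # Page-by-page mirrors the *_pages tests: each page exposes the raw response,
    # including next_page_token.
    for page in client.list_server_tls_policies(parent=parent).pages:
        print(page.raw_page.next_page_token, len(page.server_tls_policies))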
+ ) + client.get_server_tls_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == server_tls_policy.GetServerTlsPolicyRequest( + name="name_value", + ) + + +def test_get_server_tls_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_server_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_server_tls_policy + ] = mock_rpc + request = {} + client.get_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_server_tls_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_server_tls_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_server_tls_policy + ] = mock_rpc + + request = {} + await client.get_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_server_tls_policy_async( + transport: str = "grpc_asyncio", + request_type=server_tls_policy.GetServerTlsPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
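The *_use_cached_wrapped_rpc tests above lock in an internal detail: at construction time the client wraps every transport method once with google.api_core.gapic_v1.method.wrap_method (which bakes in default retry and timeout behaviour) and then reuses that cached wrapper from _transport._wrapped_methods on every call. A stand-alone sketch of what such a wrapper does, using a plain function in place of a transport stub (illustrative only; the exact keyword arguments forwarded to the target can vary across google-api-core versions):

    from google.api_core import gapic_v1
    from google.api_core import retry as retries

    calls = []

    def fake_rpc(request, **kwargs):
        # Stand-in for a transport-level gRPC callable.
        calls.append((request, kwargs.get("timeout")))
        return "response"

    # Same helper the tests patch ("google.api_core.gapic_v1.method.wrap_method");
    # the wrapper supplies these defaults whenever the caller passes none.
    wrapped = gapic_v1.method.wrap_method(
        fake_rpc,
        default_retry=retries.Retry(),
        default_timeout=5.0,
    )

    wrapped({"name": "demo"})                # uses the baked-in defaults
    wrapped({"name": "demo"}, timeout=30.0)  # per-call values still take precedence
    print(calls)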
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + server_tls_policy.ServerTlsPolicy( + name="name_value", + description="description_value", + allow_open=True, + ) + ) + response = await client.get_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = server_tls_policy.GetServerTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, server_tls_policy.ServerTlsPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.allow_open is True + + +@pytest.mark.asyncio +async def test_get_server_tls_policy_async_from_dict(): + await test_get_server_tls_policy_async(request_type=dict) + + +def test_get_server_tls_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = server_tls_policy.GetServerTlsPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_tls_policy), "__call__" + ) as call: + call.return_value = server_tls_policy.ServerTlsPolicy() + client.get_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_server_tls_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = server_tls_policy.GetServerTlsPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_tls_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + server_tls_policy.ServerTlsPolicy() + ) + await client.get_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_server_tls_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = server_tls_policy.ServerTlsPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_server_tls_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
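The *_field_headers tests assert that the resource name from the request is echoed into an x-goog-request-params metadata entry, which is how the backend routes the call. The helper both the tests and the generated client use for this is google.api_core.gapic_v1.routing_header; a small offline sketch (the resource name is a placeholder):

    from google.api_core import gapic_v1

    name = "projects/my-project/locations/us-central1/serverTlsPolicies/my-policy"  # placeholder
    routing_metadata = gapic_v1.routing_header.to_grpc_metadata((("name", name),))

    # A single ("x-goog-request-params", "name=<resource name>") tuple, which is
    # exactly what the assertions above look for in kw["metadata"].
    print(routing_metadata)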
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_server_tls_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_server_tls_policy( + server_tls_policy.GetServerTlsPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_server_tls_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = server_tls_policy.ServerTlsPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + server_tls_policy.ServerTlsPolicy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_server_tls_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_server_tls_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_server_tls_policy( + server_tls_policy.GetServerTlsPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_server_tls_policy.CreateServerTlsPolicyRequest, + dict, + ], +) +def test_create_server_tls_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_server_tls_policy.CreateServerTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_server_tls_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_server_tls_policy.CreateServerTlsPolicyRequest( + parent="parent_value", + server_tls_policy_id="server_tls_policy_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_server_tls_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_server_tls_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_server_tls_policy.CreateServerTlsPolicyRequest( + parent="parent_value", + server_tls_policy_id="server_tls_policy_id_value", + ) + + +def test_create_server_tls_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_server_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_server_tls_policy + ] = mock_rpc + request = {} + client.create_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_server_tls_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_server_tls_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_server_tls_policy + ] = mock_rpc + + request = {} + await client.create_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_server_tls_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_server_tls_policy.CreateServerTlsPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_server_tls_policy.CreateServerTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_server_tls_policy_async_from_dict(): + await test_create_server_tls_policy_async(request_type=dict) + + +def test_create_server_tls_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_server_tls_policy.CreateServerTlsPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_server_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_server_tls_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_server_tls_policy.CreateServerTlsPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_server_tls_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. 
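Create, update and delete of ServerTlsPolicy are long-running operations: the transport returns an operations_pb2.Operation and the client hands back a future, which is why these tests assert isinstance(response, future.Future). A usage sketch (import path assumed as above; real credentials, an existing project and the placeholder names below would be needed to actually run it):

    from google.cloud import network_security_v1alpha1  # assumed import path

    client = network_security_v1alpha1.NetworkSecurityClient()

    operation = client.create_server_tls_policy(
        parent="projects/my-project/locations/us-central1",  # placeholder
        server_tls_policy=network_security_v1alpha1.ServerTlsPolicy(
            description="demo policy",
        ),
        server_tls_policy_id="my-policy",                    # placeholder
    )

    # The return value behaves like a concurrent.futures Future; result() polls the
    # operation until it completes and returns the finished ServerTlsPolicy.
    policy = operation.result(timeout=300)
    print(policy.name)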
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_server_tls_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_server_tls_policy( + parent="parent_value", + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + server_tls_policy_id="server_tls_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].server_tls_policy + mock_val = gcn_server_tls_policy.ServerTlsPolicy(name="name_value") + assert arg == mock_val + arg = args[0].server_tls_policy_id + mock_val = "server_tls_policy_id_value" + assert arg == mock_val + + +def test_create_server_tls_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_server_tls_policy( + gcn_server_tls_policy.CreateServerTlsPolicyRequest(), + parent="parent_value", + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + server_tls_policy_id="server_tls_policy_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_server_tls_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_server_tls_policy( + parent="parent_value", + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + server_tls_policy_id="server_tls_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].server_tls_policy + mock_val = gcn_server_tls_policy.ServerTlsPolicy(name="name_value") + assert arg == mock_val + arg = args[0].server_tls_policy_id + mock_val = "server_tls_policy_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_server_tls_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_server_tls_policy( + gcn_server_tls_policy.CreateServerTlsPolicyRequest(), + parent="parent_value", + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + server_tls_policy_id="server_tls_policy_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_server_tls_policy.UpdateServerTlsPolicyRequest, + dict, + ], +) +def test_update_server_tls_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_server_tls_policy.UpdateServerTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_server_tls_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_server_tls_policy.UpdateServerTlsPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_server_tls_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_server_tls_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_server_tls_policy.UpdateServerTlsPolicyRequest() + + +def test_update_server_tls_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_server_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_server_tls_policy + ] = mock_rpc + request = {} + client.update_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_server_tls_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_server_tls_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_server_tls_policy + ] = mock_rpc + + request = {} + await client.update_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_server_tls_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_server_tls_policy.UpdateServerTlsPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_server_tls_policy.UpdateServerTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_server_tls_policy_async_from_dict(): + await test_update_server_tls_policy_async(request_type=dict) + + +def test_update_server_tls_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_server_tls_policy.UpdateServerTlsPolicyRequest() + + request.server_tls_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_server_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "server_tls_policy.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_server_tls_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_server_tls_policy.UpdateServerTlsPolicyRequest() + + request.server_tls_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_server_tls_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "server_tls_policy.name=name_value", + ) in kw["metadata"] + + +def test_update_server_tls_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_server_tls_policy( + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].server_tls_policy + mock_val = gcn_server_tls_policy.ServerTlsPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_server_tls_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_server_tls_policy( + gcn_server_tls_policy.UpdateServerTlsPolicyRequest(), + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_server_tls_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_server_tls_policy( + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].server_tls_policy + mock_val = gcn_server_tls_policy.ServerTlsPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_server_tls_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
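The update tests pair the resource with a google.protobuf FieldMask and also verify that the routing header is derived from server_tls_policy.name. A sketch of a partial update under the same assumptions as the earlier sketches:

    from google.cloud import network_security_v1alpha1  # assumed import path
    from google.protobuf import field_mask_pb2

    client = network_security_v1alpha1.NetworkSecurityClient()

    policy = network_security_v1alpha1.ServerTlsPolicy(
        name="projects/my-project/locations/us-central1/serverTlsPolicies/my-policy",  # placeholder
        description="updated description",
    )

    # Only the paths listed in update_mask are modified; all other fields are left alone.
    operation = client.update_server_tls_policy(
        server_tls_policy=policy,
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    operation.result()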
+ with pytest.raises(ValueError): + await client.update_server_tls_policy( + gcn_server_tls_policy.UpdateServerTlsPolicyRequest(), + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + server_tls_policy.DeleteServerTlsPolicyRequest, + dict, + ], +) +def test_delete_server_tls_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = server_tls_policy.DeleteServerTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_server_tls_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = server_tls_policy.DeleteServerTlsPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_server_tls_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_server_tls_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == server_tls_policy.DeleteServerTlsPolicyRequest( + name="name_value", + ) + + +def test_delete_server_tls_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_server_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_server_tls_policy + ] = mock_rpc + request = {} + client.delete_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_server_tls_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_server_tls_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_server_tls_policy + ] = mock_rpc + + request = {} + await client.delete_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_server_tls_policy_async( + transport: str = "grpc_asyncio", + request_type=server_tls_policy.DeleteServerTlsPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = server_tls_policy.DeleteServerTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_server_tls_policy_async_from_dict(): + await test_delete_server_tls_policy_async(request_type=dict) + + +def test_delete_server_tls_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = server_tls_policy.DeleteServerTlsPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_server_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_server_tls_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = server_tls_policy.DeleteServerTlsPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_server_tls_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_server_tls_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_server_tls_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_server_tls_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_server_tls_policy( + server_tls_policy.DeleteServerTlsPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_server_tls_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_server_tls_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_server_tls_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_server_tls_policy( + server_tls_policy.DeleteServerTlsPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + client_tls_policy.ListClientTlsPoliciesRequest, + dict, + ], +) +def test_list_client_tls_policies(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = client_tls_policy.ListClientTlsPoliciesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_client_tls_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = client_tls_policy.ListClientTlsPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClientTlsPoliciesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_client_tls_policies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
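The *_non_empty_request_with_auto_populated_field tests (this one continues just below with the populated request) are a coverage guard for AIP-4235: request fields annotated for auto-population, typically a UUID4 request_id, are filled in by the client only when the caller leaves them empty, while explicitly set strings pass through untouched. A self-contained sketch of that rule, using a stand-in dataclass rather than any real request type:

    import uuid
    from dataclasses import dataclass

    @dataclass
    class FakeRequest:
        # Stand-in for a request message whose request_id is annotated for auto-population.
        parent: str = ""
        request_id: str = ""

    def auto_populate(request: FakeRequest) -> FakeRequest:
        # Fill the UUID4 field only when the caller left it empty; never overwrite
        # an explicitly provided value.
        if not request.request_id:
            request.request_id = str(uuid.uuid4())
        return request

    print(auto_populate(FakeRequest(parent="parent_value")).request_id)     # freshly generated UUID4
    print(auto_populate(FakeRequest(request_id="explicit-id")).request_id)  # "explicit-id"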
+ request = client_tls_policy.ListClientTlsPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_client_tls_policies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == client_tls_policy.ListClientTlsPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_client_tls_policies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_client_tls_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_client_tls_policies + ] = mock_rpc + request = {} + client.list_client_tls_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_client_tls_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_client_tls_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_client_tls_policies + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_client_tls_policies + ] = mock_rpc + + request = {} + await client.list_client_tls_policies(request) + + # Establish that the underlying gRPC stub method was called. 
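+ # mock.AsyncMock tracks calls the same way mock.Mock does, so call_count can
+ # be asserted directly on the replaced wrapped method.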
+ assert mock_rpc.call_count == 1 + + await client.list_client_tls_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_client_tls_policies_async( + transport: str = "grpc_asyncio", + request_type=client_tls_policy.ListClientTlsPoliciesRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + client_tls_policy.ListClientTlsPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_client_tls_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = client_tls_policy.ListClientTlsPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClientTlsPoliciesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_client_tls_policies_async_from_dict(): + await test_list_client_tls_policies_async(request_type=dict) + + +def test_list_client_tls_policies_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = client_tls_policy.ListClientTlsPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + call.return_value = client_tls_policy.ListClientTlsPoliciesResponse() + client.list_client_tls_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_client_tls_policies_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = client_tls_policy.ListClientTlsPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + client_tls_policy.ListClientTlsPoliciesResponse() + ) + await client.list_client_tls_policies(request) + + # Establish that the underlying gRPC stub method was called. 
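+ # Only a non-zero call count is asserted for the async stub; the sync variant
+ # above checks the exact count.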
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_client_tls_policies_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = client_tls_policy.ListClientTlsPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_client_tls_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_client_tls_policies_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_client_tls_policies( + client_tls_policy.ListClientTlsPoliciesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_client_tls_policies_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = client_tls_policy.ListClientTlsPoliciesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + client_tls_policy.ListClientTlsPoliciesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_client_tls_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_client_tls_policies_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_client_tls_policies( + client_tls_policy.ListClientTlsPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_client_tls_policies_pager(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + # Set the response to a series of pages. 
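+ # The trailing RuntimeError below is a sentinel: if the pager requested a
+ # page beyond the four provided, the mock would raise and the test would fail.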
+ call.side_effect = ( + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + ], + next_page_token="abc", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[], + next_page_token="def", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + ], + next_page_token="ghi", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_client_tls_policies( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, client_tls_policy.ClientTlsPolicy) for i in results) + + +def test_list_client_tls_policies_pages(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + ], + next_page_token="abc", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[], + next_page_token="def", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + ], + next_page_token="ghi", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_client_tls_policies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_client_tls_policies_async_pager(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
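+ # Because the stub is patched with mock.AsyncMock, each call already returns
+ # an awaitable, so the plain response objects below need no FakeUnaryUnaryCall
+ # wrapper.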
+ call.side_effect = ( + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + ], + next_page_token="abc", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[], + next_page_token="def", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + ], + next_page_token="ghi", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_client_tls_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, client_tls_policy.ClientTlsPolicy) for i in responses) + + +@pytest.mark.asyncio +async def test_list_client_tls_policies_async_pages(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + ], + next_page_token="abc", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[], + next_page_token="def", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + ], + next_page_token="ghi", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_client_tls_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + client_tls_policy.GetClientTlsPolicyRequest, + dict, + ], +) +def test_get_client_tls_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = client_tls_policy.ClientTlsPolicy( + name="name_value", + description="description_value", + sni="sni_value", + ) + response = client.get_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. 
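+ # The request sent was empty, so args[0] should equal a freshly constructed
+ # default GetClientTlsPolicyRequest, whether the input was a message or a dict.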
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = client_tls_policy.GetClientTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, client_tls_policy.ClientTlsPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.sni == "sni_value" + + +def test_get_client_tls_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = client_tls_policy.GetClientTlsPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_client_tls_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_client_tls_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == client_tls_policy.GetClientTlsPolicyRequest( + name="name_value", + ) + + +def test_get_client_tls_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_client_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_client_tls_policy + ] = mock_rpc + request = {} + client.get_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_client_tls_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_client_tls_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_client_tls_policy + ] = mock_rpc + + request = {} + await client.get_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_client_tls_policy_async( + transport: str = "grpc_asyncio", + request_type=client_tls_policy.GetClientTlsPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + client_tls_policy.ClientTlsPolicy( + name="name_value", + description="description_value", + sni="sni_value", + ) + ) + response = await client.get_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = client_tls_policy.GetClientTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, client_tls_policy.ClientTlsPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.sni == "sni_value" + + +@pytest.mark.asyncio +async def test_get_client_tls_policy_async_from_dict(): + await test_get_client_tls_policy_async(request_type=dict) + + +def test_get_client_tls_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = client_tls_policy.GetClientTlsPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
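+ # The generated client mirrors URI path fields (here `name`) into the
+ # x-goog-request-params metadata entry so the backend can route the request.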
+ with mock.patch.object( + type(client.transport.get_client_tls_policy), "__call__" + ) as call: + call.return_value = client_tls_policy.ClientTlsPolicy() + client.get_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_client_tls_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = client_tls_policy.GetClientTlsPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_client_tls_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + client_tls_policy.ClientTlsPolicy() + ) + await client.get_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_client_tls_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = client_tls_policy.ClientTlsPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_client_tls_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_client_tls_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_client_tls_policy( + client_tls_policy.GetClientTlsPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_client_tls_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = client_tls_policy.ClientTlsPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + client_tls_policy.ClientTlsPolicy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_client_tls_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_client_tls_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_client_tls_policy( + client_tls_policy.GetClientTlsPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_client_tls_policy.CreateClientTlsPolicyRequest, + dict, + ], +) +def test_create_client_tls_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_client_tls_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_client_tls_policy.CreateClientTlsPolicyRequest( + parent="parent_value", + client_tls_policy_id="client_tls_policy_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_client_tls_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_client_tls_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_client_tls_policy.CreateClientTlsPolicyRequest( + parent="parent_value", + client_tls_policy_id="client_tls_policy_id_value", + ) + + +def test_create_client_tls_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_client_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_client_tls_policy + ] = mock_rpc + request = {} + client.create_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_client_tls_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_client_tls_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_client_tls_policy + ] = mock_rpc + + request = {} + await client.create_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_client_tls_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_client_tls_policy.CreateClientTlsPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_client_tls_policy_async_from_dict(): + await test_create_client_tls_policy_async(request_type=dict) + + +def test_create_client_tls_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_client_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_client_tls_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_client_tls_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
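+ # Create methods route on the parent collection, so the expected header value
+ # is parent=parent_value rather than a resource name.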
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_client_tls_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_client_tls_policy( + parent="parent_value", + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + client_tls_policy_id="client_tls_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].client_tls_policy + mock_val = gcn_client_tls_policy.ClientTlsPolicy(name="name_value") + assert arg == mock_val + arg = args[0].client_tls_policy_id + mock_val = "client_tls_policy_id_value" + assert arg == mock_val + + +def test_create_client_tls_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_client_tls_policy( + gcn_client_tls_policy.CreateClientTlsPolicyRequest(), + parent="parent_value", + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + client_tls_policy_id="client_tls_policy_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_client_tls_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_client_tls_policy( + parent="parent_value", + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + client_tls_policy_id="client_tls_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].client_tls_policy + mock_val = gcn_client_tls_policy.ClientTlsPolicy(name="name_value") + assert arg == mock_val + arg = args[0].client_tls_policy_id + mock_val = "client_tls_policy_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_client_tls_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_client_tls_policy( + gcn_client_tls_policy.CreateClientTlsPolicyRequest(), + parent="parent_value", + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + client_tls_policy_id="client_tls_policy_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_client_tls_policy.UpdateClientTlsPolicyRequest, + dict, + ], +) +def test_update_client_tls_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_client_tls_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_client_tls_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_client_tls_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + + +def test_update_client_tls_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_client_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_client_tls_policy + ] = mock_rpc + request = {} + client.update_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_client_tls_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_client_tls_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_client_tls_policy + ] = mock_rpc + + request = {} + await client.update_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_client_tls_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_client_tls_policy.UpdateClientTlsPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_client_tls_policy_async_from_dict(): + await test_update_client_tls_policy_async(request_type=dict) + + +def test_update_client_tls_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
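+ # For Update methods the routed resource name is nested, so the expected
+ # header value is client_tls_policy.name=name_value rather than a bare name.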
+ request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + + request.client_tls_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_client_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "client_tls_policy.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_client_tls_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + + request.client_tls_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_client_tls_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "client_tls_policy.name=name_value", + ) in kw["metadata"] + + +def test_update_client_tls_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_client_tls_policy( + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].client_tls_policy + mock_val = gcn_client_tls_policy.ClientTlsPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_client_tls_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_client_tls_policy( + gcn_client_tls_policy.UpdateClientTlsPolicyRequest(), + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_client_tls_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_client_tls_policy( + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].client_tls_policy + mock_val = gcn_client_tls_policy.ClientTlsPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_client_tls_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_client_tls_policy( + gcn_client_tls_policy.UpdateClientTlsPolicyRequest(), + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + client_tls_policy.DeleteClientTlsPolicyRequest, + dict, + ], +) +def test_delete_client_tls_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = client_tls_policy.DeleteClientTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_client_tls_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = client_tls_policy.DeleteClientTlsPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_client_tls_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_client_tls_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == client_tls_policy.DeleteClientTlsPolicyRequest( + name="name_value", + ) + + +def test_delete_client_tls_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_client_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_client_tls_policy + ] = mock_rpc + request = {} + client.delete_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_client_tls_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_client_tls_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_client_tls_policy + ] = mock_rpc + + request = {} + await client.delete_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_client_tls_policy_async( + transport: str = "grpc_asyncio", + request_type=client_tls_policy.DeleteClientTlsPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = client_tls_policy.DeleteClientTlsPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_client_tls_policy_async_from_dict(): + await test_delete_client_tls_policy_async(request_type=dict) + + +def test_delete_client_tls_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = client_tls_policy.DeleteClientTlsPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_client_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_client_tls_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = client_tls_policy.DeleteClientTlsPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_client_tls_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_client_tls_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_client_tls_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_client_tls_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_client_tls_policy( + client_tls_policy.DeleteClientTlsPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_client_tls_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_client_tls_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_client_tls_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_client_tls_policy( + client_tls_policy.DeleteClientTlsPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy.ListGatewaySecurityPoliciesRequest, + dict, + ], +) +def test_list_gateway_security_policies(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
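+ # Populating next_page_token and unreachable here lets the assertions
+ # below confirm that scalar response fields pass through the
+ # ListGatewaySecurityPoliciesPager wrapper unchanged.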
+ call.return_value = gateway_security_policy.ListGatewaySecurityPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_gateway_security_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gateway_security_policy.ListGatewaySecurityPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGatewaySecurityPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_gateway_security_policies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gateway_security_policy.ListGatewaySecurityPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_gateway_security_policies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gateway_security_policy.ListGatewaySecurityPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_gateway_security_policies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_gateway_security_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_gateway_security_policies + ] = mock_rpc + request = {} + client.list_gateway_security_policies(request) + + # Establish that the underlying gRPC stub method was called. 
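+ # Because the cached entry in _wrapped_methods was swapped for mock_rpc,
+ # the client dispatches straight to it and wrap_method is not invoked
+ # again for this request.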
+ assert mock_rpc.call_count == 1 + + client.list_gateway_security_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_gateway_security_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_gateway_security_policies + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_gateway_security_policies + ] = mock_rpc + + request = {} + await client.list_gateway_security_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_gateway_security_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_gateway_security_policies_async( + transport: str = "grpc_asyncio", + request_type=gateway_security_policy.ListGatewaySecurityPoliciesRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_gateway_security_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gateway_security_policy.ListGatewaySecurityPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGatewaySecurityPoliciesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_gateway_security_policies_async_from_dict(): + await test_list_gateway_security_policies_async(request_type=dict) + + +def test_list_gateway_security_policies_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gateway_security_policy.ListGatewaySecurityPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + call.return_value = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse() + ) + client.list_gateway_security_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_gateway_security_policies_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gateway_security_policy.ListGatewaySecurityPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy.ListGatewaySecurityPoliciesResponse() + ) + await client.list_gateway_security_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_gateway_security_policies_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_gateway_security_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_gateway_security_policies_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_gateway_security_policies( + gateway_security_policy.ListGatewaySecurityPoliciesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_gateway_security_policies_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy.ListGatewaySecurityPoliciesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_gateway_security_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_gateway_security_policies_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_gateway_security_policies( + gateway_security_policy.ListGatewaySecurityPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_gateway_security_policies_pager(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + ], + next_page_token="abc", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[], + next_page_token="def", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + ], + next_page_token="ghi", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_gateway_security_policies( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, gateway_security_policy.GatewaySecurityPolicy) + for i in results + ) + + +def test_list_gateway_security_policies_pages(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + # Set the response to a series of pages. 
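+ # Each response below is returned by one successive stub call, so
+ # iterating `.pages` walks the page tokens "abc" -> "def" -> "ghi" -> ""
+ # and stops; the trailing RuntimeError only fires if the pager requests
+ # more pages than the four provided.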
+ call.side_effect = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + ], + next_page_token="abc", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[], + next_page_token="def", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + ], + next_page_token="ghi", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_gateway_security_policies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_gateway_security_policies_async_pager(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + ], + next_page_token="abc", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[], + next_page_token="def", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + ], + next_page_token="ghi", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_gateway_security_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, gateway_security_policy.GatewaySecurityPolicy) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_gateway_security_policies_async_pages(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + ], + next_page_token="abc", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[], + next_page_token="def", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + ], + next_page_token="ghi", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_gateway_security_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy.GetGatewaySecurityPolicyRequest, + dict, + ], +) +def test_get_gateway_security_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gateway_security_policy.GatewaySecurityPolicy( + name="name_value", + description="description_value", + tls_inspection_policy="tls_inspection_policy_value", + ) + response = client.get_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gateway_security_policy.GetGatewaySecurityPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gateway_security_policy.GatewaySecurityPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.tls_inspection_policy == "tls_inspection_policy_value" + + +def test_get_gateway_security_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gateway_security_policy.GetGatewaySecurityPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_gateway_security_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_gateway_security_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gateway_security_policy.GetGatewaySecurityPolicyRequest( + name="name_value", + ) + + +def test_get_gateway_security_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_gateway_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_gateway_security_policy + ] = mock_rpc + request = {} + client.get_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_gateway_security_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_gateway_security_policy + ] = mock_rpc + + request = {} + await client.get_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_async( + transport: str = "grpc_asyncio", + request_type=gateway_security_policy.GetGatewaySecurityPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy.GatewaySecurityPolicy( + name="name_value", + description="description_value", + tls_inspection_policy="tls_inspection_policy_value", + ) + ) + response = await client.get_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gateway_security_policy.GetGatewaySecurityPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gateway_security_policy.GatewaySecurityPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.tls_inspection_policy == "tls_inspection_policy_value" + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_async_from_dict(): + await test_get_gateway_security_policy_async(request_type=dict) + + +def test_get_gateway_security_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gateway_security_policy.GetGatewaySecurityPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy), "__call__" + ) as call: + call.return_value = gateway_security_policy.GatewaySecurityPolicy() + client.get_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gateway_security_policy.GetGatewaySecurityPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy.GatewaySecurityPolicy() + ) + await client.get_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_gateway_security_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gateway_security_policy.GatewaySecurityPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_gateway_security_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_gateway_security_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_gateway_security_policy( + gateway_security_policy.GetGatewaySecurityPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gateway_security_policy.GatewaySecurityPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy.GatewaySecurityPolicy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_gateway_security_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_gateway_security_policy( + gateway_security_policy.GetGatewaySecurityPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest, + dict, + ], +) +def test_create_gateway_security_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_gateway_security_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest( + parent="parent_value", + gateway_security_policy_id="gateway_security_policy_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_gateway_security_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest( + parent="parent_value", + gateway_security_policy_id="gateway_security_policy_id_value", + ) + + +def test_create_gateway_security_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_gateway_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_gateway_security_policy + ] = mock_rpc + request = {} + client.create_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_gateway_security_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_gateway_security_policy + ] = mock_rpc + + request = {} + await client.create_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_async_from_dict(): + await test_create_gateway_security_policy_async(request_type=dict) + + +def test_create_gateway_security_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_gateway_security_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_gateway_security_policy( + parent="parent_value", + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + gateway_security_policy_id="gateway_security_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].gateway_security_policy + mock_val = gcn_gateway_security_policy.GatewaySecurityPolicy(name="name_value") + assert arg == mock_val + arg = args[0].gateway_security_policy_id + mock_val = "gateway_security_policy_id_value" + assert arg == mock_val + + +def test_create_gateway_security_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
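+ # The flattened keyword arguments are only a convenience for building the
+ # request, so combining them with an explicit request object is ambiguous
+ # and the client rejects the call with a ValueError.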
+ with pytest.raises(ValueError): + client.create_gateway_security_policy( + gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest(), + parent="parent_value", + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + gateway_security_policy_id="gateway_security_policy_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_gateway_security_policy( + parent="parent_value", + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + gateway_security_policy_id="gateway_security_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].gateway_security_policy + mock_val = gcn_gateway_security_policy.GatewaySecurityPolicy(name="name_value") + assert arg == mock_val + arg = args[0].gateway_security_policy_id + mock_val = "gateway_security_policy_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_gateway_security_policy( + gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest(), + parent="parent_value", + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + gateway_security_policy_id="gateway_security_policy_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest, + dict, + ], +) +def test_update_gateway_security_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_gateway_security_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_gateway_security_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] == gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() + ) + + +def test_update_gateway_security_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_gateway_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_gateway_security_policy + ] = mock_rpc + request = {} + client.update_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_gateway_security_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_gateway_security_policy + ] = mock_rpc + + request = {} + await client.update_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_async_from_dict(): + await test_update_gateway_security_policy_async(request_type=dict) + + +def test_update_gateway_security_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
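+ # For Update, the routing parameter comes from the nested resource name,
+ # so the header asserted below carries
+ # "gateway_security_policy.name=name_value" rather than a top-level field.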
+ request = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() + + request.gateway_security_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "gateway_security_policy.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() + + request.gateway_security_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "gateway_security_policy.name=name_value", + ) in kw["metadata"] + + +def test_update_gateway_security_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_gateway_security_policy( + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].gateway_security_policy + mock_val = gcn_gateway_security_policy.GatewaySecurityPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_gateway_security_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_gateway_security_policy( + gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest(), + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_gateway_security_policy( + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].gateway_security_policy + mock_val = gcn_gateway_security_policy.GatewaySecurityPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_gateway_security_policy( + gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest(), + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy.DeleteGatewaySecurityPolicyRequest, + dict, + ], +) +def test_delete_gateway_security_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gateway_security_policy.DeleteGatewaySecurityPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
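+ # Delete is a long-running operation: the stub returns an
+ # operations_pb2.Operation which the client surfaces as a
+ # google.api_core future that callers can poll or block on.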
+ assert isinstance(response, future.Future) + + +def test_delete_gateway_security_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gateway_security_policy.DeleteGatewaySecurityPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_gateway_security_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gateway_security_policy.DeleteGatewaySecurityPolicyRequest( + name="name_value", + ) + + +def test_delete_gateway_security_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_gateway_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_gateway_security_policy + ] = mock_rpc + request = {} + client.delete_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_gateway_security_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_gateway_security_policy + ] = mock_rpc + + request = {} + await client.delete_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_async( + transport: str = "grpc_asyncio", + request_type=gateway_security_policy.DeleteGatewaySecurityPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gateway_security_policy.DeleteGatewaySecurityPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_async_from_dict(): + await test_delete_gateway_security_policy_async(request_type=dict) + + +def test_delete_gateway_security_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gateway_security_policy.DeleteGatewaySecurityPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gateway_security_policy.DeleteGatewaySecurityPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_gateway_security_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_gateway_security_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_gateway_security_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_gateway_security_policy( + gateway_security_policy.DeleteGatewaySecurityPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_gateway_security_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_gateway_security_policy( + gateway_security_policy.DeleteGatewaySecurityPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, + dict, + ], +) +def test_list_gateway_security_policy_rules(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = client.list_gateway_security_policy_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGatewaySecurityPolicyRulesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_gateway_security_policy_rules_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_gateway_security_policy_rules(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_gateway_security_policy_rules_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_gateway_security_policy_rules + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_gateway_security_policy_rules + ] = mock_rpc + request = {} + client.list_gateway_security_policy_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_gateway_security_policy_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_gateway_security_policy_rules_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_gateway_security_policy_rules + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_gateway_security_policy_rules + ] = mock_rpc + + request = {} + await client.list_gateway_security_policy_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_gateway_security_policy_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_gateway_security_policy_rules_async( + transport: str = "grpc_asyncio", + request_type=gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_gateway_security_policy_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGatewaySecurityPolicyRulesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_gateway_security_policy_rules_async_from_dict(): + await test_list_gateway_security_policy_rules_async(request_type=dict) + + +def test_list_gateway_security_policy_rules_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + call.return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + ) + client.list_gateway_security_policy_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_gateway_security_policy_rules_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + ) + await client.list_gateway_security_policy_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_gateway_security_policy_rules_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_gateway_security_policy_rules( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_gateway_security_policy_rules_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_gateway_security_policy_rules( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_gateway_security_policy_rules_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_gateway_security_policy_rules( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_gateway_security_policy_rules_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_gateway_security_policy_rules( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest(), + parent="parent_value", + ) + + +def test_list_gateway_security_policy_rules_pager(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + next_page_token="abc", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[], + next_page_token="def", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + next_page_token="ghi", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_gateway_security_policy_rules( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, gateway_security_policy_rule.GatewaySecurityPolicyRule) + for i in results + ) + + +def test_list_gateway_security_policy_rules_pages(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + next_page_token="abc", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[], + next_page_token="def", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + next_page_token="ghi", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + ), + RuntimeError, + ) + pages = list(client.list_gateway_security_policy_rules(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_gateway_security_policy_rules_async_pager(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + next_page_token="abc", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[], + next_page_token="def", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + next_page_token="ghi", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_gateway_security_policy_rules( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, gateway_security_policy_rule.GatewaySecurityPolicyRule) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_gateway_security_policy_rules_async_pages(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + next_page_token="abc", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[], + next_page_token="def", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + next_page_token="ghi", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_gateway_security_policy_rules(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest, + dict, + ], +) +def test_get_gateway_security_policy_rule(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are 
mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule( + name="name_value", + enabled=True, + priority=898, + description="description_value", + session_matcher="session_matcher_value", + application_matcher="application_matcher_value", + tls_inspection_enabled=True, + basic_profile=gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW, + ) + response = client.get_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gateway_security_policy_rule.GatewaySecurityPolicyRule) + assert response.name == "name_value" + assert response.enabled is True + assert response.priority == 898 + assert response.description == "description_value" + assert response.session_matcher == "session_matcher_value" + assert response.application_matcher == "application_matcher_value" + assert response.tls_inspection_enabled is True + + +def test_get_gateway_security_policy_rule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_gateway_security_policy_rule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + +def test_get_gateway_security_policy_rule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_gateway_security_policy_rule + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_gateway_security_policy_rule + ] = mock_rpc + request = {} + client.get_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_rule_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_gateway_security_policy_rule + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_gateway_security_policy_rule + ] = mock_rpc + + request = {} + await client.get_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_rule_async( + transport: str = "grpc_asyncio", + request_type=gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy_rule.GatewaySecurityPolicyRule( + name="name_value", + enabled=True, + priority=898, + description="description_value", + session_matcher="session_matcher_value", + application_matcher="application_matcher_value", + tls_inspection_enabled=True, + ) + ) + response = await client.get_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gateway_security_policy_rule.GatewaySecurityPolicyRule) + assert response.name == "name_value" + assert response.enabled is True + assert response.priority == 898 + assert response.description == "description_value" + assert response.session_matcher == "session_matcher_value" + assert response.application_matcher == "application_matcher_value" + assert response.tls_inspection_enabled is True + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_rule_async_from_dict(): + await test_get_gateway_security_policy_rule_async(request_type=dict) + + +def test_get_gateway_security_policy_rule_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule() + client.get_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_rule_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy_rule.GatewaySecurityPolicyRule() + ) + await client.get_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_gateway_security_policy_rule_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_gateway_security_policy_rule( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_gateway_security_policy_rule_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_gateway_security_policy_rule( + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_rule_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy_rule.GatewaySecurityPolicyRule() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_gateway_security_policy_rule( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_gateway_security_policy_rule_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_gateway_security_policy_rule( + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, + dict, + ], +) +def test_create_gateway_security_policy_rule(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = ( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest() + ) + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_create_gateway_security_policy_rule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest( + parent="parent_value", + gateway_security_policy_rule_id="gateway_security_policy_rule_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_gateway_security_policy_rule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest( + parent="parent_value", + gateway_security_policy_rule_id="gateway_security_policy_rule_id_value", + ) + + +def test_create_gateway_security_policy_rule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_gateway_security_policy_rule + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_gateway_security_policy_rule + ] = mock_rpc + request = {} + client.create_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_rule_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_gateway_security_policy_rule + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_gateway_security_policy_rule + ] = mock_rpc + + request = {} + await client.create_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_rule_async( + transport: str = "grpc_asyncio", + request_type=gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = ( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest() + ) + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_rule_async_from_dict(): + await test_create_gateway_security_policy_rule_async(request_type=dict) + + +def test_create_gateway_security_policy_rule_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_rule_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_gateway_security_policy_rule_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_gateway_security_policy_rule( + parent="parent_value", + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + gateway_security_policy_rule_id="gateway_security_policy_rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].gateway_security_policy_rule + mock_val = gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ) + assert arg == mock_val + arg = args[0].gateway_security_policy_rule_id + mock_val = "gateway_security_policy_rule_id_value" + assert arg == mock_val + + +def test_create_gateway_security_policy_rule_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_gateway_security_policy_rule( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest(), + parent="parent_value", + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + gateway_security_policy_rule_id="gateway_security_policy_rule_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_rule_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_gateway_security_policy_rule( + parent="parent_value", + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + gateway_security_policy_rule_id="gateway_security_policy_rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].gateway_security_policy_rule + mock_val = gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ) + assert arg == mock_val + arg = args[0].gateway_security_policy_rule_id + mock_val = "gateway_security_policy_rule_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_gateway_security_policy_rule_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_gateway_security_policy_rule( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest(), + parent="parent_value", + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + gateway_security_policy_rule_id="gateway_security_policy_rule_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, + dict, + ], +) +def test_update_gateway_security_policy_rule(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = ( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + ) + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_gateway_security_policy_rule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_gateway_security_policy_rule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + ) + + +def test_update_gateway_security_policy_rule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_gateway_security_policy_rule + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_gateway_security_policy_rule + ] = mock_rpc + request = {} + client.update_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_rule_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_gateway_security_policy_rule + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_gateway_security_policy_rule + ] = mock_rpc + + request = {} + await client.update_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_rule_async( + transport: str = "grpc_asyncio", + request_type=gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = ( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + ) + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_rule_async_from_dict(): + await test_update_gateway_security_policy_rule_async(request_type=dict) + + +def test_update_gateway_security_policy_rule_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + + request.gateway_security_policy_rule.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "gateway_security_policy_rule.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_rule_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + + request.gateway_security_policy_rule.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "gateway_security_policy_rule.name=name_value", + ) in kw["metadata"] + + +def test_update_gateway_security_policy_rule_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_gateway_security_policy_rule( + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].gateway_security_policy_rule + mock_val = gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_gateway_security_policy_rule_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_gateway_security_policy_rule( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest(), + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_rule_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_gateway_security_policy_rule( + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].gateway_security_policy_rule + mock_val = gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_gateway_security_policy_rule_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_gateway_security_policy_rule( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest(), + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest, + dict, + ], +) +def test_delete_gateway_security_policy_rule(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_gateway_security_policy_rule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_gateway_security_policy_rule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest( + name="name_value", + ) + + +def test_delete_gateway_security_policy_rule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_gateway_security_policy_rule + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_gateway_security_policy_rule + ] = mock_rpc + request = {} + client.delete_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_rule_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_gateway_security_policy_rule + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_gateway_security_policy_rule + ] = mock_rpc + + request = {} + await client.delete_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_rule_async( + transport: str = "grpc_asyncio", + request_type=gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_rule_async_from_dict(): + await test_delete_gateway_security_policy_rule_async(request_type=dict) + + +def test_delete_gateway_security_policy_rule_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_rule_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_gateway_security_policy_rule_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_gateway_security_policy_rule( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_gateway_security_policy_rule_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_gateway_security_policy_rule( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_rule_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_gateway_security_policy_rule( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_rule_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_gateway_security_policy_rule( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + url_list.ListUrlListsRequest, + dict, + ], +) +def test_list_url_lists(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = url_list.ListUrlListsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_url_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = url_list.ListUrlListsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUrlListsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_url_lists_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = url_list.ListUrlListsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_url_lists(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == url_list.ListUrlListsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_url_lists_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_url_lists in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_url_lists] = mock_rpc + request = {} + client.list_url_lists(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_url_lists(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_url_lists_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_url_lists + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_url_lists + ] = mock_rpc + + request = {} + await client.list_url_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_url_lists(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_url_lists_async( + transport: str = "grpc_asyncio", request_type=url_list.ListUrlListsRequest +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + url_list.ListUrlListsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_url_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = url_list.ListUrlListsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUrlListsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_url_lists_async_from_dict(): + await test_list_url_lists_async(request_type=dict) + + +def test_list_url_lists_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = url_list.ListUrlListsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + call.return_value = url_list.ListUrlListsResponse() + client.list_url_lists(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_url_lists_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = url_list.ListUrlListsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + url_list.ListUrlListsResponse() + ) + await client.list_url_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_url_lists_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = url_list.ListUrlListsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_url_lists( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_url_lists_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_url_lists( + url_list.ListUrlListsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_url_lists_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = url_list.ListUrlListsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + url_list.ListUrlListsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_url_lists( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_url_lists_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_url_lists( + url_list.ListUrlListsRequest(), + parent="parent_value", + ) + + +def test_list_url_lists_pager(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + url_list.UrlList(), + url_list.UrlList(), + ], + next_page_token="abc", + ), + url_list.ListUrlListsResponse( + url_lists=[], + next_page_token="def", + ), + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + ], + next_page_token="ghi", + ), + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + url_list.UrlList(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_url_lists(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, url_list.UrlList) for i in results) + + +def test_list_url_lists_pages(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + url_list.UrlList(), + url_list.UrlList(), + ], + next_page_token="abc", + ), + url_list.ListUrlListsResponse( + url_lists=[], + next_page_token="def", + ), + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + ], + next_page_token="ghi", + ), + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + url_list.UrlList(), + ], + ), + RuntimeError, + ) + pages = list(client.list_url_lists(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_url_lists_async_pager(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_url_lists), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + url_list.UrlList(), + url_list.UrlList(), + ], + next_page_token="abc", + ), + url_list.ListUrlListsResponse( + url_lists=[], + next_page_token="def", + ), + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + ], + next_page_token="ghi", + ), + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + url_list.UrlList(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_url_lists( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, url_list.UrlList) for i in responses) + + +@pytest.mark.asyncio +async def test_list_url_lists_async_pages(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_url_lists), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + url_list.UrlList(), + url_list.UrlList(), + ], + next_page_token="abc", + ), + url_list.ListUrlListsResponse( + url_lists=[], + next_page_token="def", + ), + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + ], + next_page_token="ghi", + ), + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + url_list.UrlList(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_url_lists(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + url_list.GetUrlListRequest, + dict, + ], +) +def test_get_url_list(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = url_list.UrlList( + name="name_value", + description="description_value", + values=["values_value"], + ) + response = client.get_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = url_list.GetUrlListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, url_list.UrlList) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.values == ["values_value"] + + +def test_get_url_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = url_list.GetUrlListRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_url_list), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_url_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == url_list.GetUrlListRequest( + name="name_value", + ) + + +def test_get_url_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_url_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_url_list] = mock_rpc + request = {} + client.get_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_url_list_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_url_list + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_url_list + ] = mock_rpc + + request = {} + await client.get_url_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_url_list_async( + transport: str = "grpc_asyncio", request_type=url_list.GetUrlListRequest +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + url_list.UrlList( + name="name_value", + description="description_value", + values=["values_value"], + ) + ) + response = await client.get_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = url_list.GetUrlListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, url_list.UrlList) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.values == ["values_value"] + + +@pytest.mark.asyncio +async def test_get_url_list_async_from_dict(): + await test_get_url_list_async(request_type=dict) + + +def test_get_url_list_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = url_list.GetUrlListRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_url_list), "__call__") as call: + call.return_value = url_list.UrlList() + client.get_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_url_list_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = url_list.GetUrlListRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_url_list), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(url_list.UrlList()) + await client.get_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_url_list_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = url_list.UrlList() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_url_list( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_url_list_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_url_list( + url_list.GetUrlListRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_url_list_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = url_list.UrlList() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(url_list.UrlList()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_url_list( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_url_list_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_url_list( + url_list.GetUrlListRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_url_list.CreateUrlListRequest, + dict, + ], +) +def test_create_url_list(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_url_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_url_list.CreateUrlListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_url_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_url_list.CreateUrlListRequest( + parent="parent_value", + url_list_id="url_list_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_url_list), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_url_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_url_list.CreateUrlListRequest( + parent="parent_value", + url_list_id="url_list_id_value", + ) + + +def test_create_url_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_url_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_url_list] = mock_rpc + request = {} + client.create_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_url_list_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_url_list + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_url_list + ] = mock_rpc + + request = {} + await client.create_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_url_list_async( + transport: str = "grpc_asyncio", request_type=gcn_url_list.CreateUrlListRequest +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_url_list.CreateUrlListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_url_list_async_from_dict(): + await test_create_url_list_async(request_type=dict) + + +def test_create_url_list_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_url_list.CreateUrlListRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_url_list), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_url_list_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_url_list.CreateUrlListRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_url_list), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_url_list_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_url_list( + parent="parent_value", + url_list=gcn_url_list.UrlList(name="name_value"), + url_list_id="url_list_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].url_list + mock_val = gcn_url_list.UrlList(name="name_value") + assert arg == mock_val + arg = args[0].url_list_id + mock_val = "url_list_id_value" + assert arg == mock_val + + +def test_create_url_list_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_url_list( + gcn_url_list.CreateUrlListRequest(), + parent="parent_value", + url_list=gcn_url_list.UrlList(name="name_value"), + url_list_id="url_list_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_url_list_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_url_list), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_url_list( + parent="parent_value", + url_list=gcn_url_list.UrlList(name="name_value"), + url_list_id="url_list_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].url_list + mock_val = gcn_url_list.UrlList(name="name_value") + assert arg == mock_val + arg = args[0].url_list_id + mock_val = "url_list_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_url_list_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_url_list( + gcn_url_list.CreateUrlListRequest(), + parent="parent_value", + url_list=gcn_url_list.UrlList(name="name_value"), + url_list_id="url_list_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_url_list.UpdateUrlListRequest, + dict, + ], +) +def test_update_url_list(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_url_list.UpdateUrlListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_url_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_url_list.UpdateUrlListRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_url_list), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_url_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_url_list.UpdateUrlListRequest() + + +def test_update_url_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_url_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_url_list] = mock_rpc + request = {} + client.update_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_url_list_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_url_list + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_url_list + ] = mock_rpc + + request = {} + await client.update_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_url_list_async( + transport: str = "grpc_asyncio", request_type=gcn_url_list.UpdateUrlListRequest +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
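+ # Note on the async mock used below (a sketch, not generated code): the async
+ # client awaits the stub call, so a bare return value is not awaitable.
+ # google.api_core's grpc_helpers_async.FakeUnaryUnaryCall wraps a message so
+ # that awaiting the mocked call resolves to it, roughly:
+ #
+ #     fake = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation())
+ #     result = await fake  # -> the wrapped Operation message
+ #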
+ with mock.patch.object(type(client.transport.update_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_url_list.UpdateUrlListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_url_list_async_from_dict(): + await test_update_url_list_async(request_type=dict) + + +def test_update_url_list_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_url_list.UpdateUrlListRequest() + + request.url_list.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_url_list), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "url_list.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_url_list_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_url_list.UpdateUrlListRequest() + + request.url_list.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_url_list), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "url_list.name=name_value", + ) in kw["metadata"] + + +def test_update_url_list_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_url_list( + url_list=gcn_url_list.UrlList(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].url_list + mock_val = gcn_url_list.UrlList(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_url_list_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_url_list( + gcn_url_list.UpdateUrlListRequest(), + url_list=gcn_url_list.UrlList(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_url_list_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_url_list( + url_list=gcn_url_list.UrlList(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].url_list + mock_val = gcn_url_list.UrlList(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_url_list_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_url_list( + gcn_url_list.UpdateUrlListRequest(), + url_list=gcn_url_list.UrlList(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + url_list.DeleteUrlListRequest, + dict, + ], +) +def test_delete_url_list(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = url_list.DeleteUrlListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
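+ # Context for the assertion below (a sketch, not generated code):
+ # DeleteUrlList is a long-running operation, so the client wraps the raw
+ # operations_pb2.Operation in a google.api_core.operation.Operation helper,
+ # which implements the future.Future interface. Typical caller usage:
+ #
+ #     lro = client.delete_url_list(request)
+ #     lro.result()  # polls until the server-side operation completes
+ #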
+ assert isinstance(response, future.Future) + + +def test_delete_url_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = url_list.DeleteUrlListRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_url_list), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_url_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == url_list.DeleteUrlListRequest( + name="name_value", + ) + + +def test_delete_url_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_url_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_url_list] = mock_rpc + request = {} + client.delete_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_url_list_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_url_list + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_url_list + ] = mock_rpc + + request = {} + await client.delete_url_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_url_list_async( + transport: str = "grpc_asyncio", request_type=url_list.DeleteUrlListRequest +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = url_list.DeleteUrlListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_url_list_async_from_dict(): + await test_delete_url_list_async(request_type=dict) + + +def test_delete_url_list_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = url_list.DeleteUrlListRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_url_list), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_url_list_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = url_list.DeleteUrlListRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_url_list), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_url_list_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_url_list( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_url_list_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_url_list( + url_list.DeleteUrlListRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_url_list_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_url_list( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_url_list_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_url_list( + url_list.DeleteUrlListRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tls_inspection_policy.ListTlsInspectionPoliciesRequest, + dict, + ], +) +def test_list_tls_inspection_policies(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_tls_inspection_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = tls_inspection_policy.ListTlsInspectionPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTlsInspectionPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_tls_inspection_policies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = tls_inspection_policy.ListTlsInspectionPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_tls_inspection_policies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tls_inspection_policy.ListTlsInspectionPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_tls_inspection_policies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_tls_inspection_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_tls_inspection_policies + ] = mock_rpc + request = {} + client.list_tls_inspection_policies(request) + + # Establish that the underlying gRPC stub method was called. 
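+ # Background for the caching assertions (a sketch, not generated code):
+ # _prep_wrapped_messages runs gapic_v1.method.wrap_method once per RPC at
+ # transport construction and stores the result in _wrapped_methods, keyed by
+ # the raw stub callable. Client methods then look the wrapper up instead of
+ # re-wrapping, roughly:
+ #
+ #     rpc = self._transport._wrapped_methods[
+ #         self._transport.list_tls_inspection_policies
+ #     ]
+ #     response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ #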
+ assert mock_rpc.call_count == 1 + + client.list_tls_inspection_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_tls_inspection_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_tls_inspection_policies + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_tls_inspection_policies + ] = mock_rpc + + request = {} + await client.list_tls_inspection_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_tls_inspection_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_tls_inspection_policies_async( + transport: str = "grpc_asyncio", + request_type=tls_inspection_policy.ListTlsInspectionPoliciesRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_tls_inspection_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = tls_inspection_policy.ListTlsInspectionPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTlsInspectionPoliciesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_tls_inspection_policies_async_from_dict(): + await test_list_tls_inspection_policies_async(request_type=dict) + + +def test_list_tls_inspection_policies_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = tls_inspection_policy.ListTlsInspectionPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + call.return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse() + client.list_tls_inspection_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_tls_inspection_policies_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tls_inspection_policy.ListTlsInspectionPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tls_inspection_policy.ListTlsInspectionPoliciesResponse() + ) + await client.list_tls_inspection_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_tls_inspection_policies_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tls_inspection_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_tls_inspection_policies_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tls_inspection_policies( + tls_inspection_policy.ListTlsInspectionPoliciesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_tls_inspection_policies_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tls_inspection_policy.ListTlsInspectionPoliciesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tls_inspection_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_tls_inspection_policies_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_tls_inspection_policies( + tls_inspection_policy.ListTlsInspectionPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_tls_inspection_policies_pager(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + ], + next_page_token="abc", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[], + next_page_token="def", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + ], + next_page_token="ghi", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_tls_inspection_policies( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tls_inspection_policy.TlsInspectionPolicy) for i in results + ) + + +def test_list_tls_inspection_policies_pages(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + # Set the response to a series of pages. 
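+ # Paging mechanics exercised below (a sketch, not generated code): each page
+ # is a separate RPC, with next_page_token copied into the next request's
+ # page_token until the token is empty; side_effect feeds the successive
+ # responses, and the trailing RuntimeError would surface any unexpected extra
+ # call. Roughly what iterating .pages does:
+ #
+ #     response = rpc(request)              # first page, made by the client call
+ #     yield response
+ #     while response.next_page_token:
+ #         request.page_token = response.next_page_token
+ #         response = rpc(request)          # one RPC per additional page
+ #         yield response
+ #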
+ call.side_effect = ( + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + ], + next_page_token="abc", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[], + next_page_token="def", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + ], + next_page_token="ghi", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tls_inspection_policies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tls_inspection_policies_async_pager(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + ], + next_page_token="abc", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[], + next_page_token="def", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + ], + next_page_token="ghi", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tls_inspection_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, tls_inspection_policy.TlsInspectionPolicy) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_tls_inspection_policies_async_pages(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + ], + next_page_token="abc", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[], + next_page_token="def", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + ], + next_page_token="ghi", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tls_inspection_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + tls_inspection_policy.GetTlsInspectionPolicyRequest, + dict, + ], +) +def test_get_tls_inspection_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tls_inspection_policy.TlsInspectionPolicy( + name="name_value", + description="description_value", + ca_pool="ca_pool_value", + trust_config="trust_config_value", + exclude_public_ca_set=True, + min_tls_version=tls_inspection_policy.TlsInspectionPolicy.TlsVersion.TLS_1_0, + tls_feature_profile=tls_inspection_policy.TlsInspectionPolicy.Profile.PROFILE_COMPATIBLE, + custom_tls_features=["custom_tls_features_value"], + ) + response = client.get_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = tls_inspection_policy.GetTlsInspectionPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tls_inspection_policy.TlsInspectionPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.ca_pool == "ca_pool_value" + assert response.trust_config == "trust_config_value" + assert response.exclude_public_ca_set is True + assert ( + response.min_tls_version + == tls_inspection_policy.TlsInspectionPolicy.TlsVersion.TLS_1_0 + ) + assert ( + response.tls_feature_profile + == tls_inspection_policy.TlsInspectionPolicy.Profile.PROFILE_COMPATIBLE + ) + assert response.custom_tls_features == ["custom_tls_features_value"] + + +def test_get_tls_inspection_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = tls_inspection_policy.GetTlsInspectionPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tls_inspection_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_tls_inspection_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tls_inspection_policy.GetTlsInspectionPolicyRequest( + name="name_value", + ) + + +def test_get_tls_inspection_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_tls_inspection_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_tls_inspection_policy + ] = mock_rpc + request = {} + client.get_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_tls_inspection_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_tls_inspection_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_tls_inspection_policy + ] = mock_rpc + + request = {} + await client.get_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_tls_inspection_policy_async( + transport: str = "grpc_asyncio", + request_type=tls_inspection_policy.GetTlsInspectionPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tls_inspection_policy.TlsInspectionPolicy( + name="name_value", + description="description_value", + ca_pool="ca_pool_value", + trust_config="trust_config_value", + exclude_public_ca_set=True, + min_tls_version=tls_inspection_policy.TlsInspectionPolicy.TlsVersion.TLS_1_0, + tls_feature_profile=tls_inspection_policy.TlsInspectionPolicy.Profile.PROFILE_COMPATIBLE, + custom_tls_features=["custom_tls_features_value"], + ) + ) + response = await client.get_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = tls_inspection_policy.GetTlsInspectionPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tls_inspection_policy.TlsInspectionPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.ca_pool == "ca_pool_value" + assert response.trust_config == "trust_config_value" + assert response.exclude_public_ca_set is True + assert ( + response.min_tls_version + == tls_inspection_policy.TlsInspectionPolicy.TlsVersion.TLS_1_0 + ) + assert ( + response.tls_feature_profile + == tls_inspection_policy.TlsInspectionPolicy.Profile.PROFILE_COMPATIBLE + ) + assert response.custom_tls_features == ["custom_tls_features_value"] + + +@pytest.mark.asyncio +async def test_get_tls_inspection_policy_async_from_dict(): + await test_get_tls_inspection_policy_async(request_type=dict) + + +def test_get_tls_inspection_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tls_inspection_policy.GetTlsInspectionPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tls_inspection_policy), "__call__" + ) as call: + call.return_value = tls_inspection_policy.TlsInspectionPolicy() + client.get_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_tls_inspection_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tls_inspection_policy.GetTlsInspectionPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tls_inspection_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tls_inspection_policy.TlsInspectionPolicy() + ) + await client.get_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_tls_inspection_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tls_inspection_policy.TlsInspectionPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_tls_inspection_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_tls_inspection_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_tls_inspection_policy( + tls_inspection_policy.GetTlsInspectionPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_tls_inspection_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tls_inspection_policy.TlsInspectionPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tls_inspection_policy.TlsInspectionPolicy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_tls_inspection_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_tls_inspection_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_tls_inspection_policy( + tls_inspection_policy.GetTlsInspectionPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest, + dict, + ], +) +def test_create_tls_inspection_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_tls_inspection_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
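+ # (Auto-population, per AIP-4235, means the client fills UUID4-formatted
+ # request fields that the API marks for auto-population with a fresh
+ # str(uuid.uuid4()) when the caller leaves them unset; a rough sketch, using
+ # a hypothetical request_id field:
+ #
+ #     if not request.request_id:
+ #         request.request_id = str(uuid.uuid4())
+ #
+ # The check below only confirms that explicitly populated, non-UUID fields
+ # pass through unchanged.)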
+ client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest( + parent="parent_value", + tls_inspection_policy_id="tls_inspection_policy_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tls_inspection_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_tls_inspection_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest( + parent="parent_value", + tls_inspection_policy_id="tls_inspection_policy_id_value", + ) + + +def test_create_tls_inspection_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_tls_inspection_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_tls_inspection_policy + ] = mock_rpc + request = {} + client.create_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_tls_inspection_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_tls_inspection_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_tls_inspection_policy + ] = mock_rpc + + request = {} + await client.create_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_tls_inspection_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_tls_inspection_policy_async_from_dict(): + await test_create_tls_inspection_policy_async(request_type=dict) + + +def test_create_tls_inspection_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tls_inspection_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_tls_inspection_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tls_inspection_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_tls_inspection_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_tls_inspection_policy( + parent="parent_value", + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + tls_inspection_policy_id="tls_inspection_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].tls_inspection_policy + mock_val = gcn_tls_inspection_policy.TlsInspectionPolicy(name="name_value") + assert arg == mock_val + arg = args[0].tls_inspection_policy_id + mock_val = "tls_inspection_policy_id_value" + assert arg == mock_val + + +def test_create_tls_inspection_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_tls_inspection_policy( + gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest(), + parent="parent_value", + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + tls_inspection_policy_id="tls_inspection_policy_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_tls_inspection_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_tls_inspection_policy( + parent="parent_value", + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + tls_inspection_policy_id="tls_inspection_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].tls_inspection_policy + mock_val = gcn_tls_inspection_policy.TlsInspectionPolicy(name="name_value") + assert arg == mock_val + arg = args[0].tls_inspection_policy_id + mock_val = "tls_inspection_policy_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_tls_inspection_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_tls_inspection_policy( + gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest(), + parent="parent_value", + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + tls_inspection_policy_id="tls_inspection_policy_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest, + dict, + ], +) +def test_update_tls_inspection_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_update_tls_inspection_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tls_inspection_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_tls_inspection_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() + + +def test_update_tls_inspection_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_tls_inspection_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_tls_inspection_policy + ] = mock_rpc + request = {} + client.update_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_tls_inspection_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_tls_inspection_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_tls_inspection_policy + ] = mock_rpc + + request = {} + await client.update_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_tls_inspection_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_tls_inspection_policy_async_from_dict(): + await test_update_tls_inspection_policy_async(request_type=dict) + + +def test_update_tls_inspection_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() + + request.tls_inspection_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tls_inspection_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "tls_inspection_policy.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_tls_inspection_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() + + request.tls_inspection_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tls_inspection_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "tls_inspection_policy.name=name_value", + ) in kw["metadata"] + + +def test_update_tls_inspection_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_tls_inspection_policy( + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tls_inspection_policy + mock_val = gcn_tls_inspection_policy.TlsInspectionPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_tls_inspection_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_tls_inspection_policy( + gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest(), + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_tls_inspection_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_tls_inspection_policy( + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tls_inspection_policy + mock_val = gcn_tls_inspection_policy.TlsInspectionPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_tls_inspection_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_tls_inspection_policy( + gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest(), + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + tls_inspection_policy.DeleteTlsInspectionPolicyRequest, + dict, + ], +) +def test_delete_tls_inspection_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = tls_inspection_policy.DeleteTlsInspectionPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_tls_inspection_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = tls_inspection_policy.DeleteTlsInspectionPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tls_inspection_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_tls_inspection_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == tls_inspection_policy.DeleteTlsInspectionPolicyRequest( + name="name_value", + ) + + +def test_delete_tls_inspection_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_tls_inspection_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_tls_inspection_policy + ] = mock_rpc + request = {} + client.delete_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_tls_inspection_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_tls_inspection_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_tls_inspection_policy + ] = mock_rpc + + request = {} + await client.delete_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_tls_inspection_policy_async( + transport: str = "grpc_asyncio", + request_type=tls_inspection_policy.DeleteTlsInspectionPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = tls_inspection_policy.DeleteTlsInspectionPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_tls_inspection_policy_async_from_dict(): + await test_delete_tls_inspection_policy_async(request_type=dict) + + +def test_delete_tls_inspection_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = tls_inspection_policy.DeleteTlsInspectionPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tls_inspection_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_tls_inspection_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = tls_inspection_policy.DeleteTlsInspectionPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tls_inspection_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_tls_inspection_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tls_inspection_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_tls_inspection_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tls_inspection_policy( + tls_inspection_policy.DeleteTlsInspectionPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_tls_inspection_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tls_inspection_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_tls_inspection_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_tls_inspection_policy( + tls_inspection_policy.DeleteTlsInspectionPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + authz_policy.ListAuthzPoliciesRequest, + dict, + ], +) +def test_list_authz_policies(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = authz_policy.ListAuthzPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_authz_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = authz_policy.ListAuthzPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAuthzPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_authz_policies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = authz_policy.ListAuthzPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_authz_policies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == authz_policy.ListAuthzPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_authz_policies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_authz_policies in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_authz_policies + ] = mock_rpc + request = {} + client.list_authz_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_authz_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_authz_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_authz_policies + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_authz_policies + ] = mock_rpc + + request = {} + await client.list_authz_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_authz_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_authz_policies_async( + transport: str = "grpc_asyncio", request_type=authz_policy.ListAuthzPoliciesRequest +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + authz_policy.ListAuthzPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_authz_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = authz_policy.ListAuthzPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAuthzPoliciesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_authz_policies_async_from_dict(): + await test_list_authz_policies_async(request_type=dict) + + +def test_list_authz_policies_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = authz_policy.ListAuthzPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + call.return_value = authz_policy.ListAuthzPoliciesResponse() + client.list_authz_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_authz_policies_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = authz_policy.ListAuthzPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + authz_policy.ListAuthzPoliciesResponse() + ) + await client.list_authz_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_authz_policies_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = authz_policy.ListAuthzPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_authz_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_authz_policies_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_authz_policies( + authz_policy.ListAuthzPoliciesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_authz_policies_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = authz_policy.ListAuthzPoliciesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + authz_policy.ListAuthzPoliciesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_authz_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_authz_policies_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_authz_policies( + authz_policy.ListAuthzPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_authz_policies_pager(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + ], + next_page_token="abc", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[], + next_page_token="def", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + ], + next_page_token="ghi", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_authz_policies(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, authz_policy.AuthzPolicy) for i in results) + + +def test_list_authz_policies_pages(transport_name: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + ], + next_page_token="abc", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[], + next_page_token="def", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + ], + next_page_token="ghi", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_authz_policies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_authz_policies_async_pager(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + ], + next_page_token="abc", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[], + next_page_token="def", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + ], + next_page_token="ghi", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_authz_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, authz_policy.AuthzPolicy) for i in responses) + + +@pytest.mark.asyncio +async def test_list_authz_policies_async_pages(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + ], + next_page_token="abc", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[], + next_page_token="def", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + ], + next_page_token="ghi", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_authz_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + authz_policy.GetAuthzPolicyRequest, + dict, + ], +) +def test_get_authz_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_authz_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = authz_policy.AuthzPolicy( + name="name_value", + description="description_value", + action=authz_policy.AuthzPolicy.AuthzAction.ALLOW, + ) + response = client.get_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = authz_policy.GetAuthzPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, authz_policy.AuthzPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.action == authz_policy.AuthzPolicy.AuthzAction.ALLOW + + +def test_get_authz_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = authz_policy.GetAuthzPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_authz_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_authz_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == authz_policy.GetAuthzPolicyRequest( + name="name_value", + ) + + +def test_get_authz_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_authz_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_authz_policy + ] = mock_rpc + request = {} + client.get_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_authz_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_authz_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_authz_policy + ] = mock_rpc + + request = {} + await client.get_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_authz_policy_async( + transport: str = "grpc_asyncio", request_type=authz_policy.GetAuthzPolicyRequest +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_authz_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + authz_policy.AuthzPolicy( + name="name_value", + description="description_value", + action=authz_policy.AuthzPolicy.AuthzAction.ALLOW, + ) + ) + response = await client.get_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = authz_policy.GetAuthzPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, authz_policy.AuthzPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.action == authz_policy.AuthzPolicy.AuthzAction.ALLOW + + +@pytest.mark.asyncio +async def test_get_authz_policy_async_from_dict(): + await test_get_authz_policy_async(request_type=dict) + + +def test_get_authz_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = authz_policy.GetAuthzPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_authz_policy), "__call__") as call: + call.return_value = authz_policy.AuthzPolicy() + client.get_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_authz_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = authz_policy.GetAuthzPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_authz_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + authz_policy.AuthzPolicy() + ) + await client.get_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_authz_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_authz_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = authz_policy.AuthzPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_authz_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_authz_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_authz_policy( + authz_policy.GetAuthzPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_authz_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_authz_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = authz_policy.AuthzPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + authz_policy.AuthzPolicy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_authz_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_authz_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_authz_policy( + authz_policy.GetAuthzPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_authz_policy.CreateAuthzPolicyRequest, + dict, + ], +) +def test_create_authz_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. 
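+        # CreateAuthzPolicy is a long-running operation: the stub returns a raw
+        # operations_pb2.Operation which the client wraps in an api_core future,
+        # hence the isinstance(response, future.Future) check below.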
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_authz_policy.CreateAuthzPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_authz_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_authz_policy.CreateAuthzPolicyRequest( + parent="parent_value", + authz_policy_id="authz_policy_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_authz_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_authz_policy.CreateAuthzPolicyRequest( + parent="parent_value", + authz_policy_id="authz_policy_id_value", + ) + + +def test_create_authz_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_authz_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_authz_policy + ] = mock_rpc + request = {} + client.create_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_authz_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_authz_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_authz_policy + ] = mock_rpc + + request = {} + await client.create_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_authz_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_authz_policy.CreateAuthzPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_authz_policy.CreateAuthzPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_authz_policy_async_from_dict(): + await test_create_authz_policy_async(request_type=dict) + + +def test_create_authz_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_authz_policy.CreateAuthzPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
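+    # (Patching __call__ on the multicallable's type intercepts the invocation
+    # no matter which channel instance produced the callable.)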
+ with mock.patch.object( + type(client.transport.create_authz_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_authz_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_authz_policy.CreateAuthzPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_authz_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_authz_policy( + parent="parent_value", + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + authz_policy_id="authz_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].authz_policy + mock_val = gcn_authz_policy.AuthzPolicy(name="name_value") + assert arg == mock_val + arg = args[0].authz_policy_id + mock_val = "authz_policy_id_value" + assert arg == mock_val + + +def test_create_authz_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_authz_policy( + gcn_authz_policy.CreateAuthzPolicyRequest(), + parent="parent_value", + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + authz_policy_id="authz_policy_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_authz_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_authz_policy( + parent="parent_value", + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + authz_policy_id="authz_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].authz_policy + mock_val = gcn_authz_policy.AuthzPolicy(name="name_value") + assert arg == mock_val + arg = args[0].authz_policy_id + mock_val = "authz_policy_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_authz_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_authz_policy( + gcn_authz_policy.CreateAuthzPolicyRequest(), + parent="parent_value", + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + authz_policy_id="authz_policy_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_authz_policy.UpdateAuthzPolicyRequest, + dict, + ], +) +def test_update_authz_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_authz_policy.UpdateAuthzPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_authz_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_authz_policy.UpdateAuthzPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_authz_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_authz_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_authz_policy.UpdateAuthzPolicyRequest() + + +def test_update_authz_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_authz_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_authz_policy + ] = mock_rpc + request = {} + client.update_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_authz_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_authz_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_authz_policy + ] = mock_rpc + + request = {} + await client.update_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_authz_policy_async( + transport: str = "grpc_asyncio", + request_type=gcn_authz_policy.UpdateAuthzPolicyRequest, +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_authz_policy.UpdateAuthzPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_authz_policy_async_from_dict(): + await test_update_authz_policy_async(request_type=dict) + + +def test_update_authz_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_authz_policy.UpdateAuthzPolicyRequest() + + request.authz_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "authz_policy.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_authz_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_authz_policy.UpdateAuthzPolicyRequest() + + request.authz_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
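+    # For Update the resource name is nested, so the routing header uses the
+    # dotted field path "authz_policy.name=name_value".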
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "authz_policy.name=name_value", + ) in kw["metadata"] + + +def test_update_authz_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_authz_policy( + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].authz_policy + mock_val = gcn_authz_policy.AuthzPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_authz_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_authz_policy( + gcn_authz_policy.UpdateAuthzPolicyRequest(), + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_authz_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_authz_policy( + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].authz_policy + mock_val = gcn_authz_policy.AuthzPolicy(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_authz_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
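+    # (The flattened keyword arguments are only a convenience for building the
+    # request object, so combining them with an explicit request is ambiguous.)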
+ with pytest.raises(ValueError): + await client.update_authz_policy( + gcn_authz_policy.UpdateAuthzPolicyRequest(), + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + authz_policy.DeleteAuthzPolicyRequest, + dict, + ], +) +def test_delete_authz_policy(request_type, transport: str = "grpc"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = authz_policy.DeleteAuthzPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_authz_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = authz_policy.DeleteAuthzPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_authz_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == authz_policy.DeleteAuthzPolicyRequest( + name="name_value", + ) + + +def test_delete_authz_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_authz_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_authz_policy + ] = mock_rpc + request = {} + client.delete_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_authz_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_authz_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_authz_policy + ] = mock_rpc + + request = {} + await client.delete_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_authz_policy_async( + transport: str = "grpc_asyncio", request_type=authz_policy.DeleteAuthzPolicyRequest +): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = authz_policy.DeleteAuthzPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_authz_policy_async_from_dict(): + await test_delete_authz_policy_async(request_type=dict) + + +def test_delete_authz_policy_field_headers(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = authz_policy.DeleteAuthzPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_authz_policy_field_headers_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = authz_policy.DeleteAuthzPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_authz_policy_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_authz_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_authz_policy_flattened_error(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_authz_policy( + authz_policy.DeleteAuthzPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_authz_policy_flattened_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_authz_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_authz_policy_flattened_error_async(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_authz_policy( + authz_policy.DeleteAuthzPolicyRequest(), + name="name_value", + ) + + +def test_list_authorization_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_authorization_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_authorization_policies + ] = mock_rpc + + request = {} + client.list_authorization_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_authorization_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_authorization_policies_rest_required_fields( + request_type=authorization_policy.ListAuthorizationPoliciesRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_authorization_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_authorization_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
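+    # (Only the optional paging query parameters may remain unset here; the
+    # required "parent" travels in the URI path.)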
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = authorization_policy.ListAuthorizationPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = authorization_policy.ListAuthorizationPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_authorization_policies(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_authorization_policies_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_authorization_policies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_authorization_policies_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = authorization_policy.ListAuthorizationPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = authorization_policy.ListAuthorizationPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_authorization_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/authorizationPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_authorization_policies_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_authorization_policies( + authorization_policy.ListAuthorizationPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_authorization_policies_rest_pager(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), + ], + next_page_token="abc", + ), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[], + next_page_token="def", + ), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + ], + next_page_token="ghi", + ), + authorization_policy.ListAuthorizationPoliciesResponse( + authorization_policies=[ + authorization_policy.AuthorizationPolicy(), + authorization_policy.AuthorizationPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + authorization_policy.ListAuthorizationPoliciesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_authorization_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, authorization_policy.AuthorizationPolicy) for i in results + ) + + pages = list(client.list_authorization_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_authorization_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_authorization_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_authorization_policy + ] = mock_rpc + + request = {} + client.get_authorization_policy(request) + + # Establish that the underlying gRPC stub method was called. 
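+        # (Under the REST transport the "stub" referred to above is the
+        # transport's HTTP-backed callable; the cached-wrapper mechanics are
+        # otherwise the same as for gRPC.)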
+ assert mock_rpc.call_count == 1 + + client.get_authorization_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_authorization_policy_rest_required_fields( + request_type=authorization_policy.GetAuthorizationPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_authorization_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_authorization_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = authorization_policy.AuthorizationPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = authorization_policy.AuthorizationPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_authorization_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_authorization_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_authorization_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_authorization_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
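+    # (The REST transport issues requests through an authorized requests.Session,
+    # so patching _session.request exposes the final URL for the path check below.)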
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = authorization_policy.AuthorizationPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = authorization_policy.AuthorizationPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_authorization_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/authorizationPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_get_authorization_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_authorization_policy( + authorization_policy.GetAuthorizationPolicyRequest(), + name="name_value", + ) + + +def test_create_authorization_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_authorization_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_authorization_policy + ] = mock_rpc + + request = {} + client.create_authorization_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_authorization_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_authorization_policy_rest_required_fields( + request_type=gcn_authorization_policy.CreateAuthorizationPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["authorization_policy_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "authorizationPolicyId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_authorization_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "authorizationPolicyId" in jsonified_request + assert ( + jsonified_request["authorizationPolicyId"] + == request_init["authorization_policy_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["authorizationPolicyId"] = "authorization_policy_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_authorization_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("authorization_policy_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "authorizationPolicyId" in jsonified_request + assert jsonified_request["authorizationPolicyId"] == "authorization_policy_id_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
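+            # (transcode() normally maps the request onto its google.api.http rule;
+            # faking it with a field-less URI keeps every required field, including
+            # the defaulted authorizationPolicyId, in the query string to inspect.)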
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_authorization_policy(request) + + expected_params = [ + ( + "authorizationPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_authorization_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_authorization_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("authorizationPolicyId",)) + & set( + ( + "parent", + "authorizationPolicyId", + "authorizationPolicy", + ) + ) + ) + + +def test_create_authorization_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), + authorization_policy_id="authorization_policy_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_authorization_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/authorizationPolicies" + % client.transport._host, + args[1], + ) + + +def test_create_authorization_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_authorization_policy( + gcn_authorization_policy.CreateAuthorizationPolicyRequest(), + parent="parent_value", + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), + authorization_policy_id="authorization_policy_id_value", + ) + + +def test_update_authorization_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_authorization_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_authorization_policy + ] = mock_rpc + + request = {} + client.update_authorization_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_authorization_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_authorization_policy_rest_required_fields( + request_type=gcn_authorization_policy.UpdateAuthorizationPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_authorization_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_authorization_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_authorization_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_authorization_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_authorization_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("authorizationPolicy",))) + + +def test_update_authorization_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "authorization_policy": { + "name": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_authorization_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{authorization_policy.name=projects/*/locations/*/authorizationPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_authorization_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_authorization_policy( + gcn_authorization_policy.UpdateAuthorizationPolicyRequest(), + authorization_policy=gcn_authorization_policy.AuthorizationPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_authorization_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_authorization_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_authorization_policy + ] = mock_rpc + + request = {} + client.delete_authorization_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_authorization_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_authorization_policy_rest_required_fields( + request_type=authorization_policy.DeleteAuthorizationPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_authorization_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_authorization_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_authorization_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_authorization_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_authorization_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_authorization_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_authorization_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/authorizationPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_authorization_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_authorization_policy( + authorization_policy.DeleteAuthorizationPolicyRequest(), + name="name_value", + ) + + +def test_list_backend_authentication_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backend_authentication_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backend_authentication_configs + ] = mock_rpc + + request = {} + client.list_backend_authentication_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backend_authentication_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backend_authentication_configs_rest_required_fields( + request_type=backend_authentication_config.ListBackendAuthenticationConfigsRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backend_authentication_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backend_authentication_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
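+        # List maps to a GET with no request body, so the stubbed transcode
+        # result below carries only uri, method and query_params.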
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backend_authentication_config.ListBackendAuthenticationConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_backend_authentication_configs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backend_authentication_configs_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_backend_authentication_configs._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_backend_authentication_configs_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_backend_authentication_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/backendAuthenticationConfigs" + % client.transport._host, + args[1], + ) + + +def test_list_backend_authentication_configs_rest_flattened_error( + transport: str = "rest", +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_backend_authentication_configs( + backend_authentication_config.ListBackendAuthenticationConfigsRequest(), + parent="parent_value", + ) + + +def test_list_backend_authentication_configs_rest_pager(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + ], + next_page_token="abc", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[], + next_page_token="def", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + ], + next_page_token="ghi", + ), + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + backend_authentication_configs=[ + backend_authentication_config.BackendAuthenticationConfig(), + backend_authentication_config.BackendAuthenticationConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backend_authentication_config.ListBackendAuthenticationConfigsResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backend_authentication_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backend_authentication_config.BackendAuthenticationConfig) + for i in results + ) + + pages = list( + client.list_backend_authentication_configs(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_backend_authentication_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backend_authentication_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_backend_authentication_config + ] = mock_rpc + + request = {} + client.get_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backend_authentication_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backend_authentication_config_rest_required_fields( + request_type=backend_authentication_config.GetBackendAuthenticationConfigRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backend_authentication_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backend_authentication_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backend_authentication_config.BackendAuthenticationConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backend_authentication_config.BackendAuthenticationConfig.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_backend_authentication_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backend_authentication_config_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.get_backend_authentication_config._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_backend_authentication_config_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backend_authentication_config.BackendAuthenticationConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backendAuthenticationConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backend_authentication_config.BackendAuthenticationConfig.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_backend_authentication_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/backendAuthenticationConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backend_authentication_config_rest_flattened_error( + transport: str = "rest", +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backend_authentication_config( + backend_authentication_config.GetBackendAuthenticationConfigRequest(), + name="name_value", + ) + + +def test_create_backend_authentication_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backend_authentication_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backend_authentication_config + ] = mock_rpc + + request = {} + client.create_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backend_authentication_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backend_authentication_config_rest_required_fields( + request_type=gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backend_authentication_config_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backendAuthenticationConfigId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backend_authentication_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backendAuthenticationConfigId" in jsonified_request + assert ( + jsonified_request["backendAuthenticationConfigId"] + == request_init["backend_authentication_config_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "backendAuthenticationConfigId" + ] = "backend_authentication_config_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backend_authentication_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
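+    # At most the id field may remain here: ``parent`` is a path parameter
+    # and the config itself is the request body, so neither should surface
+    # among the defaulted query parameters.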
+ assert not set(unset_fields) - set(("backend_authentication_config_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backendAuthenticationConfigId" in jsonified_request + assert ( + jsonified_request["backendAuthenticationConfigId"] + == "backend_authentication_config_id_value" + ) + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_backend_authentication_config(request) + + expected_params = [ + ( + "backendAuthenticationConfigId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backend_authentication_config_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.create_backend_authentication_config._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("backendAuthenticationConfigId",)) + & set( + ( + "parent", + "backendAuthenticationConfigId", + "backendAuthenticationConfig", + ) + ) + ) + + +def test_create_backend_authentication_config_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
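+        # CreateBackendAuthenticationConfig is a long-running method, so the
+        # canned response is a serialized longrunning Operation rather than
+        # the resource itself.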
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + backend_authentication_config_id="backend_authentication_config_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_backend_authentication_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/backendAuthenticationConfigs" + % client.transport._host, + args[1], + ) + + +def test_create_backend_authentication_config_rest_flattened_error( + transport: str = "rest", +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backend_authentication_config( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest(), + parent="parent_value", + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + backend_authentication_config_id="backend_authentication_config_id_value", + ) + + +def test_update_backend_authentication_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_backend_authentication_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_backend_authentication_config + ] = mock_rpc + + request = {} + client.update_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backend_authentication_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backend_authentication_config_rest_required_fields( + request_type=gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backend_authentication_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backend_authentication_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
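+            # Update maps to PATCH with the config as the request body; since
+            # update_mask is left unset in this placeholder request, only the
+            # default ``$alt`` parameter is expected on the wire below.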
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_backend_authentication_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backend_authentication_config_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.update_backend_authentication_config._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("updateMask",)) & set(("backendAuthenticationConfig",)) + ) + + +def test_update_backend_authentication_config_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backend_authentication_config": { + "name": "projects/sample1/locations/sample2/backendAuthenticationConfigs/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_backend_authentication_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{backend_authentication_config.name=projects/*/locations/*/backendAuthenticationConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backend_authentication_config_rest_flattened_error( + transport: str = "rest", +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_backend_authentication_config( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest(), + backend_authentication_config=gcn_backend_authentication_config.BackendAuthenticationConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_backend_authentication_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backend_authentication_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backend_authentication_config + ] = mock_rpc + + request = {} + client.delete_backend_authentication_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backend_authentication_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backend_authentication_config_rest_required_fields( + request_type=backend_authentication_config.DeleteBackendAuthenticationConfigRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backend_authentication_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backend_authentication_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
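+    # Patching requests.Session.request keeps the test offline: the REST
+    # transport still assembles the HTTP call, but the fake Response built
+    # below is returned instead of any network traffic.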
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_backend_authentication_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backend_authentication_config_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.delete_backend_authentication_config._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) + + +def test_delete_backend_authentication_config_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backendAuthenticationConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_backend_authentication_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/backendAuthenticationConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backend_authentication_config_rest_flattened_error( + transport: str = "rest", +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backend_authentication_config( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest(), + name="name_value", + ) + + +def test_list_server_tls_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_server_tls_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_server_tls_policies + ] = mock_rpc + + request = {} + client.list_server_tls_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_server_tls_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_server_tls_policies_rest_required_fields( + request_type=server_tls_policy.ListServerTlsPoliciesRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_server_tls_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_server_tls_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = server_tls_policy.ListServerTlsPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = server_tls_policy.ListServerTlsPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_server_tls_policies(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_server_tls_policies_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_server_tls_policies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "returnPartialSuccess", + ) + ) + & set(("parent",)) + ) + + +def test_list_server_tls_policies_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = server_tls_policy.ListServerTlsPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = server_tls_policy.ListServerTlsPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_server_tls_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/serverTlsPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_server_tls_policies_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_server_tls_policies( + server_tls_policy.ListServerTlsPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_server_tls_policies_rest_pager(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
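+    # Four pages are queued twice via req.side_effect: once for iterating the
+    # pager's items (3 + 0 + 1 + 2 = 6 policies) and once for iterating its
+    # .pages, following next_page_token until it is empty.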
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + ], + next_page_token="abc", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[], + next_page_token="def", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + ], + next_page_token="ghi", + ), + server_tls_policy.ListServerTlsPoliciesResponse( + server_tls_policies=[ + server_tls_policy.ServerTlsPolicy(), + server_tls_policy.ServerTlsPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + server_tls_policy.ListServerTlsPoliciesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_server_tls_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, server_tls_policy.ServerTlsPolicy) for i in results) + + pages = list(client.list_server_tls_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_server_tls_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_server_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_server_tls_policy + ] = mock_rpc + + request = {} + client.get_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_server_tls_policy_rest_required_fields( + request_type=server_tls_policy.GetServerTlsPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_server_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_server_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = server_tls_policy.ServerTlsPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = server_tls_policy.ServerTlsPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_server_tls_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_server_tls_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_server_tls_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_server_tls_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
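+    # The flattened kwargs below should be folded into the request object, with
+    # the sample resource name substituted into the v1alpha1 serverTlsPolicies
+    # URI that path_template.validate checks at the end of the test.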
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = server_tls_policy.ServerTlsPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/serverTlsPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = server_tls_policy.ServerTlsPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_server_tls_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/serverTlsPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_get_server_tls_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_server_tls_policy( + server_tls_policy.GetServerTlsPolicyRequest(), + name="name_value", + ) + + +def test_create_server_tls_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_server_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_server_tls_policy + ] = mock_rpc + + request = {} + client.create_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_server_tls_policy_rest_required_fields( + request_type=gcn_server_tls_policy.CreateServerTlsPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["server_tls_policy_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "serverTlsPolicyId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_server_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "serverTlsPolicyId" in jsonified_request + assert ( + jsonified_request["serverTlsPolicyId"] == request_init["server_tls_policy_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["serverTlsPolicyId"] = "server_tls_policy_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_server_tls_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("server_tls_policy_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "serverTlsPolicyId" in jsonified_request + assert jsonified_request["serverTlsPolicyId"] == "server_tls_policy_id_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
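+            # For create, the stubbed transcode result is a POST with a body, and
+            # the required serverTlsPolicyId is expected to surface as a query
+            # parameter (empty by default) in the params asserted below.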
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_server_tls_policy(request) + + expected_params = [ + ( + "serverTlsPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_server_tls_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_server_tls_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("serverTlsPolicyId",)) + & set( + ( + "parent", + "serverTlsPolicyId", + "serverTlsPolicy", + ) + ) + ) + + +def test_create_server_tls_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + server_tls_policy_id="server_tls_policy_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_server_tls_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/serverTlsPolicies" + % client.transport._host, + args[1], + ) + + +def test_create_server_tls_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_server_tls_policy( + gcn_server_tls_policy.CreateServerTlsPolicyRequest(), + parent="parent_value", + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + server_tls_policy_id="server_tls_policy_id_value", + ) + + +def test_update_server_tls_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_server_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_server_tls_policy + ] = mock_rpc + + request = {} + client.update_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_server_tls_policy_rest_required_fields( + request_type=gcn_server_tls_policy.UpdateServerTlsPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_server_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_server_tls_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
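+        # Unlike the create test above, this request has no required path or id
+        # fields, so request_init stays empty and only update_mask is tolerated
+        # among the unset optional query parameters checked earlier in this test.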
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_server_tls_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_server_tls_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_server_tls_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("serverTlsPolicy",))) + + +def test_update_server_tls_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "server_tls_policy": { + "name": "projects/sample1/locations/sample2/serverTlsPolicies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_server_tls_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{server_tls_policy.name=projects/*/locations/*/serverTlsPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_server_tls_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_server_tls_policy( + gcn_server_tls_policy.UpdateServerTlsPolicyRequest(), + server_tls_policy=gcn_server_tls_policy.ServerTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_server_tls_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_server_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_server_tls_policy + ] = mock_rpc + + request = {} + client.delete_server_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_server_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_server_tls_policy_rest_required_fields( + request_type=server_tls_policy.DeleteServerTlsPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_server_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_server_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_server_tls_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_server_tls_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_server_tls_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_server_tls_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/serverTlsPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_server_tls_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/serverTlsPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_server_tls_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_server_tls_policy( + server_tls_policy.DeleteServerTlsPolicyRequest(), + name="name_value", + ) + + +def test_list_client_tls_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_client_tls_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_client_tls_policies + ] = mock_rpc + + request = {} + client.list_client_tls_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_client_tls_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_client_tls_policies_rest_required_fields( + request_type=client_tls_policy.ListClientTlsPoliciesRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_client_tls_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_client_tls_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = client_tls_policy.ListClientTlsPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = client_tls_policy.ListClientTlsPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_client_tls_policies(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_client_tls_policies_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_client_tls_policies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_client_tls_policies_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = client_tls_policy.ListClientTlsPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = client_tls_policy.ListClientTlsPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_client_tls_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/clientTlsPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_client_tls_policies_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_client_tls_policies( + client_tls_policy.ListClientTlsPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_client_tls_policies_rest_pager(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + ], + next_page_token="abc", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[], + next_page_token="def", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + ], + next_page_token="ghi", + ), + client_tls_policy.ListClientTlsPoliciesResponse( + client_tls_policies=[ + client_tls_policy.ClientTlsPolicy(), + client_tls_policy.ClientTlsPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + client_tls_policy.ListClientTlsPoliciesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_client_tls_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, client_tls_policy.ClientTlsPolicy) for i in results) + + pages = list(client.list_client_tls_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_client_tls_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_client_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_client_tls_policy + ] = mock_rpc + + request = {} + client.get_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_client_tls_policy_rest_required_fields( + request_type=client_tls_policy.GetClientTlsPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_client_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_client_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = client_tls_policy.ClientTlsPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = client_tls_policy.ClientTlsPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_client_tls_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_client_tls_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_client_tls_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_client_tls_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = client_tls_policy.ClientTlsPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = client_tls_policy.ClientTlsPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_client_tls_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/clientTlsPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_get_client_tls_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_client_tls_policy( + client_tls_policy.GetClientTlsPolicyRequest(), + name="name_value", + ) + + +def test_create_client_tls_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_client_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_client_tls_policy + ] = mock_rpc + + request = {} + client.create_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_client_tls_policy_rest_required_fields( + request_type=gcn_client_tls_policy.CreateClientTlsPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["client_tls_policy_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "clientTlsPolicyId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_client_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "clientTlsPolicyId" in jsonified_request + assert ( + jsonified_request["clientTlsPolicyId"] == request_init["client_tls_policy_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["clientTlsPolicyId"] = "client_tls_policy_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_client_tls_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("client_tls_policy_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "clientTlsPolicyId" in jsonified_request + assert jsonified_request["clientTlsPolicyId"] == "client_tls_policy_id_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_client_tls_policy(request) + + expected_params = [ + ( + "clientTlsPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_client_tls_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_client_tls_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("clientTlsPolicyId",)) + & set( + ( + "parent", + "clientTlsPolicyId", + "clientTlsPolicy", + ) + ) + ) + + +def test_create_client_tls_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + client_tls_policy_id="client_tls_policy_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_client_tls_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/clientTlsPolicies" + % client.transport._host, + args[1], + ) + + +def test_create_client_tls_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_client_tls_policy( + gcn_client_tls_policy.CreateClientTlsPolicyRequest(), + parent="parent_value", + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + client_tls_policy_id="client_tls_policy_id_value", + ) + + +def test_update_client_tls_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_client_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_client_tls_policy + ] = mock_rpc + + request = {} + client.update_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_client_tls_policy_rest_required_fields( + request_type=gcn_client_tls_policy.UpdateClientTlsPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_client_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_client_tls_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_client_tls_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_client_tls_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_client_tls_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("clientTlsPolicy",))) + + +def test_update_client_tls_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "client_tls_policy": { + "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_client_tls_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{client_tls_policy.name=projects/*/locations/*/clientTlsPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_client_tls_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_client_tls_policy( + gcn_client_tls_policy.UpdateClientTlsPolicyRequest(), + client_tls_policy=gcn_client_tls_policy.ClientTlsPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_client_tls_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_client_tls_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_client_tls_policy + ] = mock_rpc + + request = {} + client.delete_client_tls_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_client_tls_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_client_tls_policy_rest_required_fields( + request_type=client_tls_policy.DeleteClientTlsPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_client_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_client_tls_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_client_tls_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_client_tls_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_client_tls_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_client_tls_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_client_tls_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/clientTlsPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_client_tls_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_client_tls_policy( + client_tls_policy.DeleteClientTlsPolicyRequest(), + name="name_value", + ) + + +def test_list_gateway_security_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_gateway_security_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_gateway_security_policies + ] = mock_rpc + + request = {} + client.list_gateway_security_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_gateway_security_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_gateway_security_policies_rest_required_fields( + request_type=gateway_security_policy.ListGatewaySecurityPoliciesRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gateway_security_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gateway_security_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gateway_security_policy.ListGatewaySecurityPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_gateway_security_policies(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_gateway_security_policies_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_gateway_security_policies._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_gateway_security_policies_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gateway_security_policy.ListGatewaySecurityPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gateway_security_policy.ListGatewaySecurityPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_gateway_security_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/gatewaySecurityPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_gateway_security_policies_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_gateway_security_policies( + gateway_security_policy.ListGatewaySecurityPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_gateway_security_policies_rest_pager(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + ], + next_page_token="abc", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[], + next_page_token="def", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + ], + next_page_token="ghi", + ), + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + gateway_security_policies=[ + gateway_security_policy.GatewaySecurityPolicy(), + gateway_security_policy.GatewaySecurityPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + gateway_security_policy.ListGatewaySecurityPoliciesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_gateway_security_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, gateway_security_policy.GatewaySecurityPolicy) + for i in results + ) + + pages = list( + client.list_gateway_security_policies(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_gateway_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_gateway_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_gateway_security_policy + ] = mock_rpc + + request = {} + client.get_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_gateway_security_policy_rest_required_fields( + request_type=gateway_security_policy.GetGatewaySecurityPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_gateway_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_gateway_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gateway_security_policy.GatewaySecurityPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gateway_security_policy.GatewaySecurityPolicy.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_gateway_security_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_gateway_security_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_gateway_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_gateway_security_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gateway_security_policy.GatewaySecurityPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gateway_security_policy.GatewaySecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_gateway_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/gatewaySecurityPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_get_gateway_security_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_gateway_security_policy( + gateway_security_policy.GetGatewaySecurityPolicyRequest(), + name="name_value", + ) + + +def test_create_gateway_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_gateway_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_gateway_security_policy + ] = mock_rpc + + request = {} + client.create_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_gateway_security_policy_rest_required_fields( + request_type=gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["gateway_security_policy_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "gatewaySecurityPolicyId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_gateway_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "gatewaySecurityPolicyId" in jsonified_request + assert ( + jsonified_request["gatewaySecurityPolicyId"] + == request_init["gateway_security_policy_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["gatewaySecurityPolicyId"] = "gateway_security_policy_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_gateway_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("gateway_security_policy_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "gatewaySecurityPolicyId" in jsonified_request + assert ( + jsonified_request["gatewaySecurityPolicyId"] + == "gateway_security_policy_id_value" + ) + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_gateway_security_policy(request) + + expected_params = [ + ( + "gatewaySecurityPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_gateway_security_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_gateway_security_policy._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(("gatewaySecurityPolicyId",)) + & set( + ( + "parent", + "gatewaySecurityPolicyId", + "gatewaySecurityPolicy", + ) + ) + ) + + +def test_create_gateway_security_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + gateway_security_policy_id="gateway_security_policy_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_gateway_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/gatewaySecurityPolicies" + % client.transport._host, + args[1], + ) + + +def test_create_gateway_security_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_gateway_security_policy( + gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest(), + parent="parent_value", + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + gateway_security_policy_id="gateway_security_policy_id_value", + ) + + +def test_update_gateway_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_gateway_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_gateway_security_policy + ] = mock_rpc + + request = {} + client.update_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_gateway_security_policy_rest_required_fields( + request_type=gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_gateway_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_gateway_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_gateway_security_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_gateway_security_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_gateway_security_policy._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(("updateMask",)) & set(("gatewaySecurityPolicy",))) + + +def test_update_gateway_security_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "gateway_security_policy": { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_gateway_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{gateway_security_policy.name=projects/*/locations/*/gatewaySecurityPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_gateway_security_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_gateway_security_policy( + gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest(), + gateway_security_policy=gcn_gateway_security_policy.GatewaySecurityPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_gateway_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_gateway_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_gateway_security_policy + ] = mock_rpc + + request = {} + client.delete_gateway_security_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_gateway_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_gateway_security_policy_rest_required_fields( + request_type=gateway_security_policy.DeleteGatewaySecurityPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_gateway_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_gateway_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_gateway_security_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_gateway_security_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_gateway_security_policy._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_gateway_security_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_gateway_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/gatewaySecurityPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_gateway_security_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_gateway_security_policy( + gateway_security_policy.DeleteGatewaySecurityPolicyRequest(), + name="name_value", + ) + + +def test_list_gateway_security_policy_rules_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_gateway_security_policy_rules + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_gateway_security_policy_rules + ] = mock_rpc + + request = {} + client.list_gateway_security_policy_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_gateway_security_policy_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_gateway_security_policy_rules_rest_required_fields( + request_type=gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gateway_security_policy_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_gateway_security_policy_rules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_gateway_security_policy_rules(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_gateway_security_policy_rules_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_gateway_security_policy_rules._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_gateway_security_policy_rules_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_gateway_security_policy_rules(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*/gatewaySecurityPolicies/*}/rules" + % client.transport._host, + args[1], + ) + + +def test_list_gateway_security_policy_rules_rest_flattened_error( + transport: str = "rest", +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_gateway_security_policy_rules( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest(), + parent="parent_value", + ) + + +def test_list_gateway_security_policy_rules_rest_pager(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + next_page_token="abc", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[], + next_page_token="def", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + next_page_token="ghi", + ), + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + gateway_security_policy_rules=[ + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + + pager = client.list_gateway_security_policy_rules(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, gateway_security_policy_rule.GatewaySecurityPolicyRule) + for i in results + ) + + pages = list( + client.list_gateway_security_policy_rules(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_gateway_security_policy_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_gateway_security_policy_rule + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_gateway_security_policy_rule + ] = mock_rpc + + request = {} + client.get_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_gateway_security_policy_rule_rest_required_fields( + request_type=gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_gateway_security_policy_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_gateway_security_policy_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_gateway_security_policy_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_gateway_security_policy_rule_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.get_gateway_security_policy_rule._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_gateway_security_policy_rule_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3/rules/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_gateway_security_policy_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/gatewaySecurityPolicies/*/rules/*}" + % client.transport._host, + args[1], + ) + + +def test_get_gateway_security_policy_rule_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_gateway_security_policy_rule( + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest(), + name="name_value", + ) + + +def test_create_gateway_security_policy_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_gateway_security_policy_rule + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_gateway_security_policy_rule + ] = mock_rpc + + request = {} + client.create_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_gateway_security_policy_rule_rest_required_fields( + request_type=gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_gateway_security_policy_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_gateway_security_policy_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("gateway_security_policy_rule_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_gateway_security_policy_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_gateway_security_policy_rule_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.create_gateway_security_policy_rule._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("gatewaySecurityPolicyRuleId",)) + & set( + ( + "parent", + "gatewaySecurityPolicyRule", + ) + ) + ) + + +def test_create_gateway_security_policy_rule_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + gateway_security_policy_rule_id="gateway_security_policy_rule_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_gateway_security_policy_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
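+ # path_template.validate below checks the URI used in the call against the v1alpha1 HTTP rule for this method.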
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*/gatewaySecurityPolicies/*}/rules" + % client.transport._host, + args[1], + ) + + +def test_create_gateway_security_policy_rule_rest_flattened_error( + transport: str = "rest", +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_gateway_security_policy_rule( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest(), + parent="parent_value", + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + gateway_security_policy_rule_id="gateway_security_policy_rule_id_value", + ) + + +def test_update_gateway_security_policy_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_gateway_security_policy_rule + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_gateway_security_policy_rule + ] = mock_rpc + + request = {} + client.update_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_gateway_security_policy_rule_rest_required_fields( + request_type=gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_gateway_security_policy_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_gateway_security_policy_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
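+ # i.e. the only required field that may still be unset at this point is "update_mask".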
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_gateway_security_policy_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_gateway_security_policy_rule_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.update_gateway_security_policy_rule._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("updateMask",)) & set(("gatewaySecurityPolicyRule",)) + ) + + +def test_update_gateway_security_policy_rule_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "gateway_security_policy_rule": { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3/rules/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_gateway_security_policy_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{gateway_security_policy_rule.name=projects/*/locations/*/gatewaySecurityPolicies/*/rules/*}" + % client.transport._host, + args[1], + ) + + +def test_update_gateway_security_policy_rule_rest_flattened_error( + transport: str = "rest", +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_gateway_security_policy_rule( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest(), + gateway_security_policy_rule=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule( + basic_profile=gcn_gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_gateway_security_policy_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_gateway_security_policy_rule + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_gateway_security_policy_rule + ] = mock_rpc + + request = {} + client.delete_gateway_security_policy_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_gateway_security_policy_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_gateway_security_policy_rule_rest_required_fields( + request_type=gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_gateway_security_policy_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_gateway_security_policy_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
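+ # Delete maps to HTTP DELETE with no request body, so transcode_result below carries no "body" entry.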
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_gateway_security_policy_rule(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_gateway_security_policy_rule_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.delete_gateway_security_policy_rule._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_gateway_security_policy_rule_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3/rules/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_gateway_security_policy_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/gatewaySecurityPolicies/*/rules/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_gateway_security_policy_rule_rest_flattened_error( + transport: str = "rest", +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_gateway_security_policy_rule( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest(), + name="name_value", + ) + + +def test_list_url_lists_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_url_lists in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_url_lists] = mock_rpc + + request = {} + client.list_url_lists(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_url_lists(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_url_lists_rest_required_fields(request_type=url_list.ListUrlListsRequest): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_url_lists._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_url_lists._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = url_list.ListUrlListsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = url_list.ListUrlListsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_url_lists(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_url_lists_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_url_lists._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_url_lists_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = url_list.ListUrlListsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = url_list.ListUrlListsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_url_lists(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/urlLists" + % client.transport._host, + args[1], + ) + + +def test_list_url_lists_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_url_lists( + url_list.ListUrlListsRequest(), + parent="parent_value", + ) + + +def test_list_url_lists_rest_pager(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + url_list.UrlList(), + url_list.UrlList(), + ], + next_page_token="abc", + ), + url_list.ListUrlListsResponse( + url_lists=[], + next_page_token="def", + ), + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + ], + next_page_token="ghi", + ), + url_list.ListUrlListsResponse( + url_lists=[ + url_list.UrlList(), + url_list.UrlList(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(url_list.ListUrlListsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_url_lists(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, url_list.UrlList) for i in results) + + pages = list(client.list_url_lists(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_url_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_url_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_url_list] = mock_rpc + + request = {} + client.get_url_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_url_list_rest_required_fields(request_type=url_list.GetUrlListRequest): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_url_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_url_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = url_list.UrlList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = url_list.UrlList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_url_list(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_url_list_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_url_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_url_list_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
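+ # Get returns the UrlList resource directly rather than a long-running Operation.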
+ return_value = url_list.UrlList() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/urlLists/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = url_list.UrlList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_url_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/urlLists/*}" + % client.transport._host, + args[1], + ) + + +def test_get_url_list_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_url_list( + url_list.GetUrlListRequest(), + name="name_value", + ) + + +def test_create_url_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_url_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_url_list] = mock_rpc + + request = {} + client.create_url_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_url_list_rest_required_fields( + request_type=gcn_url_list.CreateUrlListRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["url_list_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "urlListId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_url_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "urlListId" in jsonified_request + assert jsonified_request["urlListId"] == request_init["url_list_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["urlListId"] = "url_list_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_url_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("url_list_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "urlListId" in jsonified_request + assert jsonified_request["urlListId"] == "url_list_id_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
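+ # Unlike the other required string fields, url_list_id is sent as the "urlListId" query parameter, so it appears (with its empty test value) in expected_params further below.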
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_url_list(request) + + expected_params = [ + ( + "urlListId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_url_list_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_url_list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("urlListId",)) + & set( + ( + "parent", + "urlListId", + "urlList", + ) + ) + ) + + +def test_create_url_list_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + url_list=gcn_url_list.UrlList(name="name_value"), + url_list_id="url_list_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_url_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/urlLists" + % client.transport._host, + args[1], + ) + + +def test_create_url_list_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_url_list( + gcn_url_list.CreateUrlListRequest(), + parent="parent_value", + url_list=gcn_url_list.UrlList(name="name_value"), + url_list_id="url_list_id_value", + ) + + +def test_update_url_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_url_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_url_list] = mock_rpc + + request = {} + client.update_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_url_list_rest_required_fields( + request_type=gcn_url_list.UpdateUrlListRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_url_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_url_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
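+ # Update maps to HTTP PATCH and sends the message as the request body, hence transcode_result["body"] below.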
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_url_list(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_url_list_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_url_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("urlList",))) + + +def test_update_url_list_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "url_list": {"name": "projects/sample1/locations/sample2/urlLists/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + url_list=gcn_url_list.UrlList(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_url_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{url_list.name=projects/*/locations/*/urlLists/*}" + % client.transport._host, + args[1], + ) + + +def test_update_url_list_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_url_list( + gcn_url_list.UpdateUrlListRequest(), + url_list=gcn_url_list.UrlList(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_url_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_url_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_url_list] = mock_rpc + + request = {} + client.delete_url_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_url_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_url_list_rest_required_fields( + request_type=url_list.DeleteUrlListRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_url_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_url_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_url_list(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_url_list_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_url_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_url_list_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/urlLists/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_url_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/urlLists/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_url_list_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_url_list( + url_list.DeleteUrlListRequest(), + name="name_value", + ) + + +def test_list_tls_inspection_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_tls_inspection_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_tls_inspection_policies + ] = mock_rpc + + request = {} + client.list_tls_inspection_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_tls_inspection_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_tls_inspection_policies_rest_required_fields( + request_type=tls_inspection_policy.ListTlsInspectionPoliciesRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tls_inspection_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tls_inspection_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_tls_inspection_policies(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_tls_inspection_policies_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_tls_inspection_policies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_tls_inspection_policies_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_tls_inspection_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/tlsInspectionPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_tls_inspection_policies_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tls_inspection_policies( + tls_inspection_policy.ListTlsInspectionPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_tls_inspection_policies_rest_pager(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
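+ # The pager test stages four fake pages and doubles them so that both the results iteration and the .pages iteration below are served.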
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + ], + next_page_token="abc", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[], + next_page_token="def", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + ], + next_page_token="ghi", + ), + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + tls_inspection_policies=[ + tls_inspection_policy.TlsInspectionPolicy(), + tls_inspection_policy.TlsInspectionPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + tls_inspection_policy.ListTlsInspectionPoliciesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_tls_inspection_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tls_inspection_policy.TlsInspectionPolicy) for i in results + ) + + pages = list(client.list_tls_inspection_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_tls_inspection_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_tls_inspection_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_tls_inspection_policy + ] = mock_rpc + + request = {} + client.get_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_tls_inspection_policy_rest_required_fields( + request_type=tls_inspection_policy.GetTlsInspectionPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_tls_inspection_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_tls_inspection_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tls_inspection_policy.TlsInspectionPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tls_inspection_policy.TlsInspectionPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_tls_inspection_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_tls_inspection_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_tls_inspection_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_tls_inspection_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
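+    # Flattened-call pattern: the method is invoked with keyword arguments instead
+    # of a request object, sample_request supplies a resource name matching the
+    # HTTP rule, and path_template.validate() then checks that the final URL hits
+    # the expected v1alpha1 route.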
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tls_inspection_policy.TlsInspectionPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/tlsInspectionPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = tls_inspection_policy.TlsInspectionPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_tls_inspection_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/tlsInspectionPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_get_tls_inspection_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_tls_inspection_policy( + tls_inspection_policy.GetTlsInspectionPolicyRequest(), + name="name_value", + ) + + +def test_create_tls_inspection_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_tls_inspection_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_tls_inspection_policy + ] = mock_rpc + + request = {} + client.create_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_tls_inspection_policy_rest_required_fields( + request_type=gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["tls_inspection_policy_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "tlsInspectionPolicyId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_tls_inspection_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "tlsInspectionPolicyId" in jsonified_request + assert ( + jsonified_request["tlsInspectionPolicyId"] + == request_init["tls_inspection_policy_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["tlsInspectionPolicyId"] = "tls_inspection_policy_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_tls_inspection_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("tls_inspection_policy_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "tlsInspectionPolicyId" in jsonified_request + assert ( + jsonified_request["tlsInspectionPolicyId"] == "tls_inspection_policy_id_value" + ) + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
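+            # For this create (POST) method the transcoded request also carries a body,
+            # and the required tls_inspection_policy_id surfaces as the
+            # "tlsInspectionPolicyId" query parameter asserted on below.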
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_tls_inspection_policy(request) + + expected_params = [ + ( + "tlsInspectionPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_tls_inspection_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_tls_inspection_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("tlsInspectionPolicyId",)) + & set( + ( + "parent", + "tlsInspectionPolicyId", + "tlsInspectionPolicy", + ) + ) + ) + + +def test_create_tls_inspection_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + tls_inspection_policy_id="tls_inspection_policy_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_tls_inspection_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/tlsInspectionPolicies" + % client.transport._host, + args[1], + ) + + +def test_create_tls_inspection_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
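+    # (Either form works on its own: the flattened test above calls
+    # client.create_tls_inspection_policy(parent=..., tls_inspection_policy=...,
+    # tls_inspection_policy_id=...) without a request object; supplying both at
+    # once is what triggers the ValueError below.)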
+ with pytest.raises(ValueError): + client.create_tls_inspection_policy( + gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest(), + parent="parent_value", + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + tls_inspection_policy_id="tls_inspection_policy_id_value", + ) + + +def test_update_tls_inspection_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_tls_inspection_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_tls_inspection_policy + ] = mock_rpc + + request = {} + client.update_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_tls_inspection_policy_rest_required_fields( + request_type=gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_tls_inspection_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_tls_inspection_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_tls_inspection_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_tls_inspection_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_tls_inspection_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("tlsInspectionPolicy",))) + + +def test_update_tls_inspection_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "tls_inspection_policy": { + "name": "projects/sample1/locations/sample2/tlsInspectionPolicies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_tls_inspection_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{tls_inspection_policy.name=projects/*/locations/*/tlsInspectionPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_tls_inspection_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_tls_inspection_policy( + gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest(), + tls_inspection_policy=gcn_tls_inspection_policy.TlsInspectionPolicy( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_tls_inspection_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_tls_inspection_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_tls_inspection_policy + ] = mock_rpc + + request = {} + client.delete_tls_inspection_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_tls_inspection_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_tls_inspection_policy_rest_required_fields( + request_type=tls_inspection_policy.DeleteTlsInspectionPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_tls_inspection_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_tls_inspection_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
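+    # Delete (like create and update) is a long-running method, so the faked
+    # response is an operations_pb2.Operation rather than the resource message;
+    # "operations/spam" is just the placeholder operation name used in these tests.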
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_tls_inspection_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_tls_inspection_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_tls_inspection_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +def test_delete_tls_inspection_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/tlsInspectionPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_tls_inspection_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/tlsInspectionPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_tls_inspection_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_tls_inspection_policy( + tls_inspection_policy.DeleteTlsInspectionPolicyRequest(), + name="name_value", + ) + + +def test_list_authz_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_authz_policies in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_authz_policies + ] = mock_rpc + + request = {} + client.list_authz_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_authz_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_authz_policies_rest_required_fields( + request_type=authz_policy.ListAuthzPoliciesRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_authz_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_authz_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = authz_policy.ListAuthzPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = authz_policy.ListAuthzPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_authz_policies(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_authz_policies_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_authz_policies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_authz_policies_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = authz_policy.ListAuthzPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = authz_policy.ListAuthzPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_authz_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/authzPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_authz_policies_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_authz_policies( + authz_policy.ListAuthzPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_authz_policies_rest_pager(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + ], + next_page_token="abc", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[], + next_page_token="def", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + ], + next_page_token="ghi", + ), + authz_policy.ListAuthzPoliciesResponse( + authz_policies=[ + authz_policy.AuthzPolicy(), + authz_policy.AuthzPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + authz_policy.ListAuthzPoliciesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_authz_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, authz_policy.AuthzPolicy) for i in results) + + pages = list(client.list_authz_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_authz_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_authz_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_authz_policy + ] = mock_rpc + + request = {} + client.get_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_authz_policy_rest_required_fields( + request_type=authz_policy.GetAuthzPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_authz_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_authz_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = authz_policy.AuthzPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = authz_policy.AuthzPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_authz_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_authz_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_authz_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_authz_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = authz_policy.AuthzPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/authzPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = authz_policy.AuthzPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_authz_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/authzPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_get_authz_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_authz_policy( + authz_policy.GetAuthzPolicyRequest(), + name="name_value", + ) + + +def test_create_authz_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_authz_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_authz_policy + ] = mock_rpc + + request = {} + client.create_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_authz_policy_rest_required_fields( + request_type=gcn_authz_policy.CreateAuthzPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["authz_policy_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "authzPolicyId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_authz_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "authzPolicyId" in jsonified_request + assert jsonified_request["authzPolicyId"] == request_init["authz_policy_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["authzPolicyId"] = "authz_policy_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_authz_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "authz_policy_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "authzPolicyId" in jsonified_request + assert jsonified_request["authzPolicyId"] == "authz_policy_id_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_authz_policy(request) + + expected_params = [ + ( + "authzPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_authz_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_authz_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "authzPolicyId", + "requestId", + ) + ) + & set( + ( + "parent", + "authzPolicyId", + "authzPolicy", + ) + ) + ) + + +def test_create_authz_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + authz_policy_id="authz_policy_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_authz_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/authzPolicies" + % client.transport._host, + args[1], + ) + + +def test_create_authz_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_authz_policy( + gcn_authz_policy.CreateAuthzPolicyRequest(), + parent="parent_value", + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + authz_policy_id="authz_policy_id_value", + ) + + +def test_update_authz_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_authz_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_authz_policy + ] = mock_rpc + + request = {} + client.update_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_authz_policy_rest_required_fields( + request_type=gcn_authz_policy.UpdateAuthzPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_authz_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_authz_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_authz_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_authz_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_authz_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "authzPolicy", + ) + ) + ) + + +def test_update_authz_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "authz_policy": { + "name": "projects/sample1/locations/sample2/authzPolicies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_authz_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{authz_policy.name=projects/*/locations/*/authzPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_authz_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_authz_policy( + gcn_authz_policy.UpdateAuthzPolicyRequest(), + authz_policy=gcn_authz_policy.AuthzPolicy(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_authz_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_authz_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_authz_policy + ] = mock_rpc + + request = {} + client.delete_authz_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_authz_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_authz_policy_rest_required_fields( + request_type=authz_policy.DeleteAuthzPolicyRequest, +): + transport_class = transports.NetworkSecurityRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_authz_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_authz_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_authz_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_authz_policy_rest_unset_required_fields(): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_authz_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_authz_policy_rest_flattened(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/authzPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_authz_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/authzPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_authz_policy_rest_flattened_error(transport: str = "rest"): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_authz_policy( + authz_policy.DeleteAuthzPolicyRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.NetworkSecurityGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.NetworkSecurityGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkSecurityClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.NetworkSecurityGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NetworkSecurityClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NetworkSecurityClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.NetworkSecurityGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkSecurityClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.NetworkSecurityGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = NetworkSecurityClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.NetworkSecurityGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.NetworkSecurityGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NetworkSecurityGrpcTransport, + transports.NetworkSecurityGrpcAsyncIOTransport, + transports.NetworkSecurityRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = NetworkSecurityClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_authorization_policies_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_authorization_policies), "__call__" + ) as call: + call.return_value = authorization_policy.ListAuthorizationPoliciesResponse() + client.list_authorization_policies(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authorization_policy.ListAuthorizationPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_authorization_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_authorization_policy), "__call__" + ) as call: + call.return_value = authorization_policy.AuthorizationPolicy() + client.get_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authorization_policy.GetAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_authorization_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_authorization_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_authorization_policy.CreateAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_authorization_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_authorization_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_authorization_policy.UpdateAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_authorization_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_authorization_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authorization_policy.DeleteAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
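Editor's note: the empty-call tests above (and the ones that follow) lean on two unittest.mock behaviors — patching __call__ on the *type* intercepts calls made through the instance, and each entry in mock_calls unpacks as a (name, args, kwargs) triple. A minimal, self-contained sketch of that mechanic; FakeStub here is a hypothetical stand-in for a transport callable, not part of the generated code:

from unittest import mock


class FakeStub:
    """Hypothetical stand-in for a transport's unary-unary callable."""

    def __call__(self, request):
        raise NotImplementedError("replaced by the patch below")


stub = FakeStub()
with mock.patch.object(FakeStub, "__call__") as call:
    call.return_value = {"items": []}
    # Calling the instance dispatches through the patched class attribute.
    stub({"page_size": 10})

call.assert_called()
# mock_calls entries unpack as (name, args, kwargs), same as in the tests.
_, args, _ = call.mock_calls[0]
assert args[0] == {"page_size": 10}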
+def test_list_backend_authentication_configs_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + call.return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() + ) + client.list_backend_authentication_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backend_authentication_config_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backend_authentication_config), "__call__" + ) as call: + call.return_value = backend_authentication_config.BackendAuthenticationConfig() + client.get_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backend_authentication_config.GetBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backend_authentication_config_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backend_authentication_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backend_authentication_config_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backend_authentication_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
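Editor's note: taken together, the *_flattened_error tests earlier in the file and these *_empty_call_grpc tests pin down the request-coercion contract — passing both a request object and flattened fields raises ValueError, while request=None with no flattened fields is coerced to a default-constructed request message. A hedged sketch of that contract only (the helper name and signature are illustrative, not the generated clients' actual code):

from google.protobuf import field_mask_pb2


def coerce_request(request, request_type, **flattened):
    # Illustrative only: mirrors the behavior the tests assert,
    # not the generated clients' implementation.
    if request is not None and flattened:
        raise ValueError("request and flattened fields are mutually exclusive")
    if request is None:
        request = request_type(**flattened)
    return request


# request=None and no flattened fields -> a default message is sent.
assert coerce_request(None, field_mask_pb2.FieldMask) == field_mask_pb2.FieldMask()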
+def test_delete_backend_authentication_config_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backend_authentication_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_server_tls_policies_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + call.return_value = server_tls_policy.ListServerTlsPoliciesResponse() + client.list_server_tls_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = server_tls_policy.ListServerTlsPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_server_tls_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_server_tls_policy), "__call__" + ) as call: + call.return_value = server_tls_policy.ServerTlsPolicy() + client.get_server_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = server_tls_policy.GetServerTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_server_tls_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_server_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_server_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_server_tls_policy.CreateServerTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_server_tls_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_server_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_server_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_server_tls_policy.UpdateServerTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_server_tls_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_server_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_server_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = server_tls_policy.DeleteServerTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_client_tls_policies_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + call.return_value = client_tls_policy.ListClientTlsPoliciesResponse() + client.list_client_tls_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = client_tls_policy.ListClientTlsPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_client_tls_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_client_tls_policy), "__call__" + ) as call: + call.return_value = client_tls_policy.ClientTlsPolicy() + client.get_client_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = client_tls_policy.GetClientTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_client_tls_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_client_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_client_tls_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_client_tls_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_client_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_client_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_client_tls_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_client_tls_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_client_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = client_tls_policy.DeleteClientTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_gateway_security_policies_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + call.return_value = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse() + ) + client.list_gateway_security_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy.ListGatewaySecurityPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_gateway_security_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy), "__call__" + ) as call: + call.return_value = gateway_security_policy.GatewaySecurityPolicy() + client.get_gateway_security_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy.GetGatewaySecurityPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
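Editor's note: every mutating RPC in these empty-call tests fakes its response with a bare long-running Operation. Only name is populated because the tests never resolve the operation; they only verify which request message reached the stub. A small standalone check of that default behavior:

from google.longrunning import operations_pb2

op = operations_pb2.Operation(name="operations/op")
assert op.name == "operations/op"
assert not op.done  # unset bool fields default to False, so the LRO looks "in progress"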
+def test_create_gateway_security_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_gateway_security_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_gateway_security_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_gateway_security_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_gateway_security_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_gateway_security_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy.DeleteGatewaySecurityPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_gateway_security_policy_rules_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + call.return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + ) + client.list_gateway_security_policy_rules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_gateway_security_policy_rule_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule() + client.get_gateway_security_policy_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_gateway_security_policy_rule_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_gateway_security_policy_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_gateway_security_policy_rule_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_gateway_security_policy_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_gateway_security_policy_rule_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy_rule), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_gateway_security_policy_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
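Editor's note: the `assert args[0] == request_msg` checks in these tests rely on protobuf value equality — two messages of the same type compare equal field by field, so a freshly constructed request matches the default request the client sends for request=None. For example, with a well-known type:

from google.protobuf import field_mask_pb2

# Messages compare by value, not identity.
assert field_mask_pb2.FieldMask() == field_mask_pb2.FieldMask()
assert field_mask_pb2.FieldMask(paths=["a"]) != field_mask_pb2.FieldMask()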
+def test_list_url_lists_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + call.return_value = url_list.ListUrlListsResponse() + client.list_url_lists(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = url_list.ListUrlListsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_url_list_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_url_list), "__call__") as call: + call.return_value = url_list.UrlList() + client.get_url_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = url_list.GetUrlListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_url_list_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_url_list), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_url_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_url_list.CreateUrlListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_url_list_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_url_list), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_url_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_url_list.UpdateUrlListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_url_list_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_url_list), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_url_list(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = url_list.DeleteUrlListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_tls_inspection_policies_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + call.return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse() + client.list_tls_inspection_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tls_inspection_policy.ListTlsInspectionPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_tls_inspection_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_tls_inspection_policy), "__call__" + ) as call: + call.return_value = tls_inspection_policy.TlsInspectionPolicy() + client.get_tls_inspection_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tls_inspection_policy.GetTlsInspectionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_tls_inspection_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_tls_inspection_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_tls_inspection_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_tls_inspection_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_tls_inspection_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_tls_inspection_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_tls_inspection_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_tls_inspection_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_tls_inspection_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tls_inspection_policy.DeleteTlsInspectionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_authz_policies_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + call.return_value = authz_policy.ListAuthzPoliciesResponse() + client.list_authz_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authz_policy.ListAuthzPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_authz_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_authz_policy), "__call__") as call: + call.return_value = authz_policy.AuthzPolicy() + client.get_authz_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authz_policy.GetAuthzPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_authz_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_authz_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_authz_policy.CreateAuthzPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_authz_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_authz_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_authz_policy.UpdateAuthzPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_authz_policy_empty_call_grpc(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_authz_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authz_policy.DeleteAuthzPolicyRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = NetworkSecurityAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_authorization_policies_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_authorization_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + authorization_policy.ListAuthorizationPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_authorization_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authorization_policy.ListAuthorizationPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_authorization_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_authorization_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + authorization_policy.AuthorizationPolicy( + name="name_value", + description="description_value", + action=authorization_policy.AuthorizationPolicy.Action.ALLOW, + ) + ) + await client.get_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authorization_policy.GetAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
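Editor's note: these asyncio variants cannot set a plain return value on the mocked stub, because the client awaits the call; instead they wrap the canned response in grpc_helpers_async.FakeUnaryUnaryCall, which resolves to that response when awaited. A minimal standalone example of the same helper:

import asyncio

from google.api_core import grpc_helpers_async
from google.longrunning import operations_pb2


async def main():
    fake_call = grpc_helpers_async.FakeUnaryUnaryCall(
        operations_pb2.Operation(name="operations/spam")
    )
    response = await fake_call  # awaiting yields the wrapped response
    assert response.name == "operations/spam"


asyncio.run(main())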
+@pytest.mark.asyncio +async def test_create_authorization_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_authorization_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_authorization_policy.CreateAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_authorization_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_authorization_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_authorization_policy.UpdateAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_authorization_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_authorization_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authorization_policy.DeleteAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backend_authentication_configs_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_backend_authentication_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backend_authentication_config_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backend_authentication_config.BackendAuthenticationConfig( + name="name_value", + description="description_value", + client_certificate="client_certificate_value", + trust_config="trust_config_value", + well_known_roots=backend_authentication_config.BackendAuthenticationConfig.WellKnownRoots.NONE, + etag="etag_value", + ) + ) + await client.get_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backend_authentication_config.GetBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_backend_authentication_config_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_backend_authentication_config_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backend_authentication_config_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backend_authentication_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_server_tls_policies_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + server_tls_policy.ListServerTlsPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_server_tls_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = server_tls_policy.ListServerTlsPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_server_tls_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + server_tls_policy.ServerTlsPolicy( + name="name_value", + description="description_value", + allow_open=True, + ) + ) + await client.get_server_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = server_tls_policy.GetServerTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_server_tls_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_server_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_server_tls_policy.CreateServerTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_server_tls_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_server_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_server_tls_policy.UpdateServerTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_server_tls_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_server_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_server_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = server_tls_policy.DeleteServerTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_client_tls_policies_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + client_tls_policy.ListClientTlsPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_client_tls_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = client_tls_policy.ListClientTlsPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_client_tls_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + client_tls_policy.ClientTlsPolicy( + name="name_value", + description="description_value", + sni="sni_value", + ) + ) + await client.get_client_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = client_tls_policy.GetClientTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_client_tls_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_client_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_client_tls_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_client_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_client_tls_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_client_tls_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_client_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = client_tls_policy.DeleteClientTlsPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_gateway_security_policies_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy.ListGatewaySecurityPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_gateway_security_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy.ListGatewaySecurityPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_gateway_security_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy.GatewaySecurityPolicy( + name="name_value", + description="description_value", + tls_inspection_policy="tls_inspection_policy_value", + ) + ) + await client.get_gateway_security_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy.GetGatewaySecurityPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_gateway_security_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_gateway_security_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_gateway_security_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_gateway_security_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_gateway_security_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy.DeleteGatewaySecurityPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_gateway_security_policy_rules_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_gateway_security_policy_rules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_gateway_security_policy_rule_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gateway_security_policy_rule.GatewaySecurityPolicyRule( + name="name_value", + enabled=True, + priority=898, + description="description_value", + session_matcher="session_matcher_value", + application_matcher="application_matcher_value", + tls_inspection_enabled=True, + ) + ) + await client.get_gateway_security_policy_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_gateway_security_policy_rule_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_gateway_security_policy_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_gateway_security_policy_rule_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_gateway_security_policy_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_gateway_security_policy_rule_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_gateway_security_policy_rule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_gateway_security_policy_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_url_lists_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + url_list.ListUrlListsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_url_lists(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = url_list.ListUrlListsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_url_list_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + url_list.UrlList( + name="name_value", + description="description_value", + values=["values_value"], + ) + ) + await client.get_url_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = url_list.GetUrlListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_url_list_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_url_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_url_list.CreateUrlListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_url_list_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_url_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_url_list.UpdateUrlListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_url_list_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_url_list), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_url_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = url_list.DeleteUrlListRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_tls_inspection_policies_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tls_inspection_policy.ListTlsInspectionPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_tls_inspection_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tls_inspection_policy.ListTlsInspectionPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_tls_inspection_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tls_inspection_policy.TlsInspectionPolicy( + name="name_value", + description="description_value", + ca_pool="ca_pool_value", + trust_config="trust_config_value", + exclude_public_ca_set=True, + min_tls_version=tls_inspection_policy.TlsInspectionPolicy.TlsVersion.TLS_1_0, + tls_feature_profile=tls_inspection_policy.TlsInspectionPolicy.Profile.PROFILE_COMPATIBLE, + custom_tls_features=["custom_tls_features_value"], + ) + ) + await client.get_tls_inspection_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tls_inspection_policy.GetTlsInspectionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_tls_inspection_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_tls_inspection_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_tls_inspection_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_tls_inspection_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_tls_inspection_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_tls_inspection_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_tls_inspection_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tls_inspection_policy.DeleteTlsInspectionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_authz_policies_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + authz_policy.ListAuthzPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_authz_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authz_policy.ListAuthzPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_authz_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_authz_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + authz_policy.AuthzPolicy( + name="name_value", + description="description_value", + action=authz_policy.AuthzPolicy.AuthzAction.ALLOW, + ) + ) + await client.get_authz_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authz_policy.GetAuthzPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_authz_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_authz_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_authz_policy.CreateAuthzPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_authz_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_authz_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_authz_policy.UpdateAuthzPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_authz_policy_empty_call_grpc_asyncio(): + client = NetworkSecurityAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_authz_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authz_policy.DeleteAuthzPolicyRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = NetworkSecurityClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_authorization_policies_rest_bad_request( + request_type=authorization_policy.ListAuthorizationPoliciesRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_authorization_policies(request) + + +@pytest.mark.parametrize( + "request_type", + [ + authorization_policy.ListAuthorizationPoliciesRequest, + dict, + ], +) +def test_list_authorization_policies_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = authorization_policy.ListAuthorizationPoliciesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = authorization_policy.ListAuthorizationPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_authorization_policies(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAuthorizationPoliciesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_authorization_policies_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_list_authorization_policies" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_list_authorization_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_list_authorization_policies" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = authorization_policy.ListAuthorizationPoliciesRequest.pb( + authorization_policy.ListAuthorizationPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = authorization_policy.ListAuthorizationPoliciesResponse.to_json( + authorization_policy.ListAuthorizationPoliciesResponse() + ) + req.return_value.content = return_value + + request = authorization_policy.ListAuthorizationPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = authorization_policy.ListAuthorizationPoliciesResponse() + post_with_metadata.return_value = ( + authorization_policy.ListAuthorizationPoliciesResponse(), + metadata, + ) + + client.list_authorization_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_authorization_policy_rest_bad_request( + request_type=authorization_policy.GetAuthorizationPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_authorization_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + authorization_policy.GetAuthorizationPolicyRequest, + dict, + ], +) +def test_get_authorization_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = authorization_policy.AuthorizationPolicy( + name="name_value", + description="description_value", + action=authorization_policy.AuthorizationPolicy.Action.ALLOW, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = authorization_policy.AuthorizationPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_authorization_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, authorization_policy.AuthorizationPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.action == authorization_policy.AuthorizationPolicy.Action.ALLOW + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_authorization_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_get_authorization_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_get_authorization_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_get_authorization_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = authorization_policy.GetAuthorizationPolicyRequest.pb( + authorization_policy.GetAuthorizationPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = authorization_policy.AuthorizationPolicy.to_json( + authorization_policy.AuthorizationPolicy() + ) + req.return_value.content = return_value + + request = authorization_policy.GetAuthorizationPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = authorization_policy.AuthorizationPolicy() + post_with_metadata.return_value = ( + authorization_policy.AuthorizationPolicy(), + metadata, + ) + + client.get_authorization_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_authorization_policy_rest_bad_request( + request_type=gcn_authorization_policy.CreateAuthorizationPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_authorization_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_authorization_policy.CreateAuthorizationPolicyRequest, + dict, + ], +) +def test_create_authorization_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["authorization_policy"] = { + "name": "name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "action": 1, + "rules": [ + { + "sources": [ + { + "principals": ["principals_value1", "principals_value2"], + "ip_blocks": ["ip_blocks_value1", "ip_blocks_value2"], + } + ], + "destinations": [ + { + "hosts": ["hosts_value1", "hosts_value2"], + "ports": [569, 570], + "methods": ["methods_value1", "methods_value2"], + "http_header_match": { + "regex_match": "regex_match_value", + "header_name": "header_name_value", + }, + } + ], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_authorization_policy.CreateAuthorizationPolicyRequest.meta.fields[ + "authorization_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "authorization_policy" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["authorization_policy"][field])): + del request_init["authorization_policy"][field][i][subfield] + else: + del request_init["authorization_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_authorization_policy(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_authorization_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_create_authorization_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_create_authorization_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_create_authorization_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_authorization_policy.CreateAuthorizationPolicyRequest.pb( + gcn_authorization_policy.CreateAuthorizationPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_authorization_policy.CreateAuthorizationPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_authorization_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_authorization_policy_rest_bad_request( + request_type=gcn_authorization_policy.UpdateAuthorizationPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "authorization_policy": { + "name": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_authorization_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_authorization_policy.UpdateAuthorizationPolicyRequest, + dict, + ], +) +def test_update_authorization_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "authorization_policy": { + "name": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + } + request_init["authorization_policy"] = { + "name": "projects/sample1/locations/sample2/authorizationPolicies/sample3", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "action": 1, + "rules": [ + { + "sources": [ + { + "principals": ["principals_value1", "principals_value2"], + "ip_blocks": ["ip_blocks_value1", "ip_blocks_value2"], + } + ], + "destinations": [ + { + "hosts": ["hosts_value1", "hosts_value2"], + "ports": [569, 570], + "methods": ["methods_value1", "methods_value2"], + "http_header_match": { + "regex_match": "regex_match_value", + "header_name": "header_name_value", + }, + } + ], + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_authorization_policy.UpdateAuthorizationPolicyRequest.meta.fields[ + "authorization_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "authorization_policy" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["authorization_policy"][field])): + del request_init["authorization_policy"][field][i][subfield] + else: + del request_init["authorization_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_authorization_policy(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_authorization_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_update_authorization_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_update_authorization_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_update_authorization_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_authorization_policy.UpdateAuthorizationPolicyRequest.pb( + gcn_authorization_policy.UpdateAuthorizationPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_authorization_policy.UpdateAuthorizationPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_authorization_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_authorization_policy_rest_bad_request( + request_type=authorization_policy.DeleteAuthorizationPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_authorization_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + authorization_policy.DeleteAuthorizationPolicyRequest, + dict, + ], +) +def test_delete_authorization_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_authorization_policy(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_authorization_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_delete_authorization_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_delete_authorization_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_delete_authorization_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = authorization_policy.DeleteAuthorizationPolicyRequest.pb( + authorization_policy.DeleteAuthorizationPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = authorization_policy.DeleteAuthorizationPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = 
request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_authorization_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_backend_authentication_configs_rest_bad_request( + request_type=backend_authentication_config.ListBackendAuthenticationConfigsRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backend_authentication_configs(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backend_authentication_config.ListBackendAuthenticationConfigsRequest, + dict, + ], +) +def test_list_backend_authentication_configs_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backend_authentication_configs(request) + + # Establish that the response is the type that we expect. 
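+ # The list call returns a ListBackendAuthenticationConfigsPager that wraps the
+ # mocked response; the scalar fields below come straight from that first page.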
+ assert isinstance(response, pagers.ListBackendAuthenticationConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backend_authentication_configs_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_list_backend_authentication_configs", + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_list_backend_authentication_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "pre_list_backend_authentication_configs", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest.pb( + backend_authentication_config.ListBackendAuthenticationConfigsRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backend_authentication_config.ListBackendAuthenticationConfigsResponse.to_json( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() + ) + req.return_value.content = return_value + + request = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse() + ) + post_with_metadata.return_value = ( + backend_authentication_config.ListBackendAuthenticationConfigsResponse(), + metadata, + ) + + client.list_backend_authentication_configs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_backend_authentication_config_rest_bad_request( + request_type=backend_authentication_config.GetBackendAuthenticationConfigRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backendAuthenticationConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_backend_authentication_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backend_authentication_config.GetBackendAuthenticationConfigRequest, + dict, + ], +) +def test_get_backend_authentication_config_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backendAuthenticationConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backend_authentication_config.BackendAuthenticationConfig( + name="name_value", + description="description_value", + client_certificate="client_certificate_value", + trust_config="trust_config_value", + well_known_roots=backend_authentication_config.BackendAuthenticationConfig.WellKnownRoots.NONE, + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backend_authentication_config.BackendAuthenticationConfig.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_backend_authentication_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance( + response, backend_authentication_config.BackendAuthenticationConfig + ) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.client_certificate == "client_certificate_value" + assert response.trust_config == "trust_config_value" + assert ( + response.well_known_roots + == backend_authentication_config.BackendAuthenticationConfig.WellKnownRoots.NONE + ) + assert response.etag == "etag_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backend_authentication_config_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_get_backend_authentication_config", + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_get_backend_authentication_config_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "pre_get_backend_authentication_config", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = ( + backend_authentication_config.GetBackendAuthenticationConfigRequest.pb( + backend_authentication_config.GetBackendAuthenticationConfigRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = ( + backend_authentication_config.BackendAuthenticationConfig.to_json( + backend_authentication_config.BackendAuthenticationConfig() + ) + ) + req.return_value.content = return_value + + request = backend_authentication_config.GetBackendAuthenticationConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backend_authentication_config.BackendAuthenticationConfig() + post_with_metadata.return_value = ( + backend_authentication_config.BackendAuthenticationConfig(), + metadata, + ) + + client.get_backend_authentication_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_backend_authentication_config_rest_bad_request( + request_type=gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_backend_authentication_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest, + dict, + ], +) +def test_create_backend_authentication_config_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backend_authentication_config"] = { + "name": "name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "client_certificate": "client_certificate_value", + "trust_config": "trust_config_value", + "well_known_roots": 1, + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest.meta.fields[ + "backend_authentication_config" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
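+ # proto-plus messages describe their schema via `meta.fields`, while plain
+ # protobuf messages expose `DESCRIPTOR.fields`; handling both lets the test
+ # prune sample-request subfields that the runtime dependency does not know.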
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_authentication_config" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["backend_authentication_config"][field]) + ): + del request_init["backend_authentication_config"][field][i][ + subfield + ] + else: + del request_init["backend_authentication_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_backend_authentication_config(request) + + # Establish that the response is the type that we expect. 
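+ # create_backend_authentication_config is a long-running operation, so the
+ # mocked payload is an operations_pb2.Operation; the generated check below
+ # only re-serializes it and does not assert on individual operation fields.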
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backend_authentication_config_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_create_backend_authentication_config", + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_create_backend_authentication_config_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "pre_create_backend_authentication_config", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest.pb( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = ( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_backend_authentication_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_backend_authentication_config_rest_bad_request( + request_type=gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "backend_authentication_config": { + "name": "projects/sample1/locations/sample2/backendAuthenticationConfigs/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_backend_authentication_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest, + dict, + ], +) +def test_update_backend_authentication_config_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "backend_authentication_config": { + "name": "projects/sample1/locations/sample2/backendAuthenticationConfigs/sample3" + } + } + request_init["backend_authentication_config"] = { + "name": "projects/sample1/locations/sample2/backendAuthenticationConfigs/sample3", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "client_certificate": "client_certificate_value", + "trust_config": "trust_config_value", + "well_known_roots": 1, + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest.meta.fields[ + "backend_authentication_config" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_authentication_config" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["backend_authentication_config"][field]) + ): + del request_init["backend_authentication_config"][field][i][ + subfield + ] + else: + del request_init["backend_authentication_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_backend_authentication_config(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backend_authentication_config_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_update_backend_authentication_config", + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_update_backend_authentication_config_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "pre_update_backend_authentication_config", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest.pb( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = ( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_backend_authentication_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_backend_authentication_config_rest_bad_request( + request_type=backend_authentication_config.DeleteBackendAuthenticationConfigRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backendAuthenticationConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_backend_authentication_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backend_authentication_config.DeleteBackendAuthenticationConfigRequest, + dict, + ], +) +def test_delete_backend_authentication_config_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backendAuthenticationConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_backend_authentication_config(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backend_authentication_config_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_delete_backend_authentication_config", + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_delete_backend_authentication_config_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "pre_delete_backend_authentication_config", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = ( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest.pb( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = ( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_backend_authentication_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_server_tls_policies_rest_bad_request( + request_type=server_tls_policy.ListServerTlsPoliciesRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_server_tls_policies(request) + + +@pytest.mark.parametrize( + "request_type", + [ + server_tls_policy.ListServerTlsPoliciesRequest, + dict, + ], +) +def test_list_server_tls_policies_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = server_tls_policy.ListServerTlsPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = server_tls_policy.ListServerTlsPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_server_tls_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListServerTlsPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_server_tls_policies_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_list_server_tls_policies" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_list_server_tls_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_list_server_tls_policies" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = server_tls_policy.ListServerTlsPoliciesRequest.pb( + server_tls_policy.ListServerTlsPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = server_tls_policy.ListServerTlsPoliciesResponse.to_json( + server_tls_policy.ListServerTlsPoliciesResponse() + ) + req.return_value.content = return_value + + request = server_tls_policy.ListServerTlsPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = server_tls_policy.ListServerTlsPoliciesResponse() + post_with_metadata.return_value = ( + server_tls_policy.ListServerTlsPoliciesResponse(), + metadata, + ) + + client.list_server_tls_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_server_tls_policy_rest_bad_request( + request_type=server_tls_policy.GetServerTlsPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/serverTlsPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_server_tls_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + server_tls_policy.GetServerTlsPolicyRequest, + dict, + ], +) +def test_get_server_tls_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/serverTlsPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = server_tls_policy.ServerTlsPolicy( + name="name_value", + description="description_value", + allow_open=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = server_tls_policy.ServerTlsPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_server_tls_policy(request) + + # Establish that the response is the type that we expect. 
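+ # Each assertion mirrors a field that was set on the mocked ServerTlsPolicy
+ # above; fields left at their defaults are not checked.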
+ assert isinstance(response, server_tls_policy.ServerTlsPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.allow_open is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_server_tls_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_get_server_tls_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_get_server_tls_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_get_server_tls_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = server_tls_policy.GetServerTlsPolicyRequest.pb( + server_tls_policy.GetServerTlsPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = server_tls_policy.ServerTlsPolicy.to_json( + server_tls_policy.ServerTlsPolicy() + ) + req.return_value.content = return_value + + request = server_tls_policy.GetServerTlsPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = server_tls_policy.ServerTlsPolicy() + post_with_metadata.return_value = server_tls_policy.ServerTlsPolicy(), metadata + + client.get_server_tls_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_server_tls_policy_rest_bad_request( + request_type=gcn_server_tls_policy.CreateServerTlsPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_server_tls_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_server_tls_policy.CreateServerTlsPolicyRequest, + dict, + ], +) +def test_create_server_tls_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["server_tls_policy"] = { + "name": "name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "allow_open": True, + "server_certificate": { + "local_filepath": { + "certificate_path": "certificate_path_value", + "private_key_path": "private_key_path_value", + }, + "grpc_endpoint": {"target_uri": "target_uri_value"}, + "certificate_provider_instance": { + "plugin_instance": "plugin_instance_value" + }, + }, + "mtls_policy": { + "client_validation_mode": 1, + "client_validation_ca": [ + { + "ca_cert_path": "ca_cert_path_value", + "grpc_endpoint": {}, + "certificate_provider_instance": {}, + } + ], + "client_validation_trust_config": "client_validation_trust_config_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_server_tls_policy.CreateServerTlsPolicyRequest.meta.fields[ + "server_tls_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["server_tls_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["server_tls_policy"][field])): + del request_init["server_tls_policy"][field][i][subfield] + else: + del request_init["server_tls_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_server_tls_policy(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_server_tls_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_create_server_tls_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_create_server_tls_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_create_server_tls_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_server_tls_policy.CreateServerTlsPolicyRequest.pb( + gcn_server_tls_policy.CreateServerTlsPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_server_tls_policy.CreateServerTlsPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_server_tls_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_server_tls_policy_rest_bad_request( + request_type=gcn_server_tls_policy.UpdateServerTlsPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "server_tls_policy": { + "name": "projects/sample1/locations/sample2/serverTlsPolicies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_server_tls_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_server_tls_policy.UpdateServerTlsPolicyRequest, + dict, + ], +) +def test_update_server_tls_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "server_tls_policy": { + "name": "projects/sample1/locations/sample2/serverTlsPolicies/sample3" + } + } + request_init["server_tls_policy"] = { + "name": "projects/sample1/locations/sample2/serverTlsPolicies/sample3", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "allow_open": True, + "server_certificate": { + "local_filepath": { + "certificate_path": "certificate_path_value", + "private_key_path": "private_key_path_value", + }, + "grpc_endpoint": {"target_uri": "target_uri_value"}, + "certificate_provider_instance": { + "plugin_instance": "plugin_instance_value" + }, + }, + "mtls_policy": { + "client_validation_mode": 1, + "client_validation_ca": [ + { + "ca_cert_path": "ca_cert_path_value", + "grpc_endpoint": {}, + "certificate_provider_instance": {}, + } + ], + "client_validation_trust_config": "client_validation_trust_config_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_server_tls_policy.UpdateServerTlsPolicyRequest.meta.fields[ + "server_tls_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["server_tls_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["server_tls_policy"][field])): + del request_init["server_tls_policy"][field][i][subfield] + else: + del request_init["server_tls_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_server_tls_policy(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_server_tls_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_update_server_tls_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_update_server_tls_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_update_server_tls_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_server_tls_policy.UpdateServerTlsPolicyRequest.pb( + gcn_server_tls_policy.UpdateServerTlsPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_server_tls_policy.UpdateServerTlsPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_server_tls_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_server_tls_policy_rest_bad_request( + request_type=server_tls_policy.DeleteServerTlsPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/serverTlsPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_server_tls_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + server_tls_policy.DeleteServerTlsPolicyRequest, + dict, + ], +) +def test_delete_server_tls_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/serverTlsPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_server_tls_policy(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_server_tls_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_delete_server_tls_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_delete_server_tls_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_delete_server_tls_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = server_tls_policy.DeleteServerTlsPolicyRequest.pb( + server_tls_policy.DeleteServerTlsPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = server_tls_policy.DeleteServerTlsPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_server_tls_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_client_tls_policies_rest_bad_request( + request_type=client_tls_policy.ListClientTlsPoliciesRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_client_tls_policies(request) + + +@pytest.mark.parametrize( + "request_type", + [ + client_tls_policy.ListClientTlsPoliciesRequest, + dict, + ], +) +def test_list_client_tls_policies_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = client_tls_policy.ListClientTlsPoliciesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = client_tls_policy.ListClientTlsPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_client_tls_policies(request) + + # Establish that the response is the type that we expect. 
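+    # List calls are wrapped in a pager by the client, so the checks below look at
+    # the pager type and the surfaced next_page_token rather than the raw response proto.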
+ assert isinstance(response, pagers.ListClientTlsPoliciesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_client_tls_policies_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_list_client_tls_policies" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_list_client_tls_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_list_client_tls_policies" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = client_tls_policy.ListClientTlsPoliciesRequest.pb( + client_tls_policy.ListClientTlsPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = client_tls_policy.ListClientTlsPoliciesResponse.to_json( + client_tls_policy.ListClientTlsPoliciesResponse() + ) + req.return_value.content = return_value + + request = client_tls_policy.ListClientTlsPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = client_tls_policy.ListClientTlsPoliciesResponse() + post_with_metadata.return_value = ( + client_tls_policy.ListClientTlsPoliciesResponse(), + metadata, + ) + + client.list_client_tls_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_client_tls_policy_rest_bad_request( + request_type=client_tls_policy.GetClientTlsPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_client_tls_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + client_tls_policy.GetClientTlsPolicyRequest, + dict, + ], +) +def test_get_client_tls_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = client_tls_policy.ClientTlsPolicy( + name="name_value", + description="description_value", + sni="sni_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = client_tls_policy.ClientTlsPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_client_tls_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, client_tls_policy.ClientTlsPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.sni == "sni_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_client_tls_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_get_client_tls_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_get_client_tls_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_get_client_tls_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = client_tls_policy.GetClientTlsPolicyRequest.pb( + client_tls_policy.GetClientTlsPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = client_tls_policy.ClientTlsPolicy.to_json( + client_tls_policy.ClientTlsPolicy() + ) + req.return_value.content = return_value + + request = client_tls_policy.GetClientTlsPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = client_tls_policy.ClientTlsPolicy() + post_with_metadata.return_value = client_tls_policy.ClientTlsPolicy(), metadata + + client.get_client_tls_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_client_tls_policy_rest_bad_request( + request_type=gcn_client_tls_policy.CreateClientTlsPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_client_tls_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_client_tls_policy.CreateClientTlsPolicyRequest, + dict, + ], +) +def test_create_client_tls_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["client_tls_policy"] = { + "name": "name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "sni": "sni_value", + "client_certificate": { + "local_filepath": { + "certificate_path": "certificate_path_value", + "private_key_path": "private_key_path_value", + }, + "grpc_endpoint": {"target_uri": "target_uri_value"}, + "certificate_provider_instance": { + "plugin_instance": "plugin_instance_value" + }, + }, + "server_validation_ca": [ + { + "ca_cert_path": "ca_cert_path_value", + "grpc_endpoint": {}, + "certificate_provider_instance": {}, + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_client_tls_policy.CreateClientTlsPolicyRequest.meta.fields[ + "client_tls_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
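+        # proto-plus message types expose their fields through `meta.fields`, while
+        # vanilla protobuf types expose them through `DESCRIPTOR.fields`; the check on
+        # `DESCRIPTOR` below selects the matching introspection path.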
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["client_tls_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["client_tls_policy"][field])): + del request_init["client_tls_policy"][field][i][subfield] + else: + del request_init["client_tls_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_client_tls_policy(request) + + # Establish that the response is the type that we expect. 
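+    # create_client_tls_policy returns a long-running operation, so beyond the call
+    # succeeding there are no field-level assertions on the Operation payload here.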
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_client_tls_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_create_client_tls_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_create_client_tls_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_create_client_tls_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_client_tls_policy.CreateClientTlsPolicyRequest.pb( + gcn_client_tls_policy.CreateClientTlsPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_client_tls_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_client_tls_policy_rest_bad_request( + request_type=gcn_client_tls_policy.UpdateClientTlsPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "client_tls_policy": { + "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_client_tls_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_client_tls_policy.UpdateClientTlsPolicyRequest, + dict, + ], +) +def test_update_client_tls_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "client_tls_policy": { + "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" + } + } + request_init["client_tls_policy"] = { + "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "sni": "sni_value", + "client_certificate": { + "local_filepath": { + "certificate_path": "certificate_path_value", + "private_key_path": "private_key_path_value", + }, + "grpc_endpoint": {"target_uri": "target_uri_value"}, + "certificate_provider_instance": { + "plugin_instance": "plugin_instance_value" + }, + }, + "server_validation_ca": [ + { + "ca_cert_path": "ca_cert_path_value", + "grpc_endpoint": {}, + "certificate_provider_instance": {}, + } + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_client_tls_policy.UpdateClientTlsPolicyRequest.meta.fields[ + "client_tls_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["client_tls_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["client_tls_policy"][field])): + del request_init["client_tls_policy"][field][i][subfield] + else: + del request_init["client_tls_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_client_tls_policy(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_client_tls_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_update_client_tls_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_update_client_tls_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_update_client_tls_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_client_tls_policy.UpdateClientTlsPolicyRequest.pb( + gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_client_tls_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_client_tls_policy_rest_bad_request( + request_type=client_tls_policy.DeleteClientTlsPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_client_tls_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + client_tls_policy.DeleteClientTlsPolicyRequest, + dict, + ], +) +def test_delete_client_tls_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_client_tls_policy(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_client_tls_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_delete_client_tls_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_delete_client_tls_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_delete_client_tls_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = client_tls_policy.DeleteClientTlsPolicyRequest.pb( + client_tls_policy.DeleteClientTlsPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = client_tls_policy.DeleteClientTlsPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_client_tls_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_gateway_security_policies_rest_bad_request( + request_type=gateway_security_policy.ListGatewaySecurityPoliciesRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_gateway_security_policies(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy.ListGatewaySecurityPoliciesRequest, + dict, + ], +) +def test_list_gateway_security_policies_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gateway_security_policy.ListGatewaySecurityPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gateway_security_policy.ListGatewaySecurityPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_gateway_security_policies(request) + + # Establish that the response is the type that we expect. 
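+    # As with the other list methods the response is surfaced through a pager; the
+    # `unreachable` field set in the fake response is asserted below as well.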
+ assert isinstance(response, pagers.ListGatewaySecurityPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_gateway_security_policies_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_list_gateway_security_policies" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_list_gateway_security_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_list_gateway_security_policies" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gateway_security_policy.ListGatewaySecurityPoliciesRequest.pb( + gateway_security_policy.ListGatewaySecurityPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse.to_json( + gateway_security_policy.ListGatewaySecurityPoliciesResponse() + ) + ) + req.return_value.content = return_value + + request = gateway_security_policy.ListGatewaySecurityPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse() + ) + post_with_metadata.return_value = ( + gateway_security_policy.ListGatewaySecurityPoliciesResponse(), + metadata, + ) + + client.list_gateway_security_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_gateway_security_policy_rest_bad_request( + request_type=gateway_security_policy.GetGatewaySecurityPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_gateway_security_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy.GetGatewaySecurityPolicyRequest, + dict, + ], +) +def test_get_gateway_security_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gateway_security_policy.GatewaySecurityPolicy( + name="name_value", + description="description_value", + tls_inspection_policy="tls_inspection_policy_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gateway_security_policy.GatewaySecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_gateway_security_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gateway_security_policy.GatewaySecurityPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.tls_inspection_policy == "tls_inspection_policy_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_gateway_security_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_get_gateway_security_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_get_gateway_security_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_get_gateway_security_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gateway_security_policy.GetGatewaySecurityPolicyRequest.pb( + gateway_security_policy.GetGatewaySecurityPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gateway_security_policy.GatewaySecurityPolicy.to_json( + gateway_security_policy.GatewaySecurityPolicy() + ) + req.return_value.content = return_value + + request = gateway_security_policy.GetGatewaySecurityPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gateway_security_policy.GatewaySecurityPolicy() + post_with_metadata.return_value = ( + gateway_security_policy.GatewaySecurityPolicy(), + metadata, + ) + + client.get_gateway_security_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_gateway_security_policy_rest_bad_request( + request_type=gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_gateway_security_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest, + dict, + ], +) +def test_create_gateway_security_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["gateway_security_policy"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "tls_inspection_policy": "tls_inspection_policy_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest.meta.fields[ + "gateway_security_policy" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "gateway_security_policy" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["gateway_security_policy"][field])): + del request_init["gateway_security_policy"][field][i][subfield] + else: + del request_init["gateway_security_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_gateway_security_policy(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_gateway_security_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_create_gateway_security_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_create_gateway_security_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_create_gateway_security_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest.pb( + gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_gateway_security_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_gateway_security_policy_rest_bad_request( + request_type=gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "gateway_security_policy": { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_gateway_security_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest, + dict, + ], +) +def test_update_gateway_security_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "gateway_security_policy": { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + } + request_init["gateway_security_policy"] = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "tls_inspection_policy": "tls_inspection_policy_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest.meta.fields[ + "gateway_security_policy" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "gateway_security_policy" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["gateway_security_policy"][field])): + del request_init["gateway_security_policy"][field][i][subfield] + else: + del request_init["gateway_security_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_gateway_security_policy(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_gateway_security_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_update_gateway_security_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_update_gateway_security_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_update_gateway_security_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest.pb( + gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_gateway_security_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_gateway_security_policy_rest_bad_request( + request_type=gateway_security_policy.DeleteGatewaySecurityPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_gateway_security_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy.DeleteGatewaySecurityPolicyRequest, + dict, + ], +) +def test_delete_gateway_security_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_gateway_security_policy(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_gateway_security_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_delete_gateway_security_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_delete_gateway_security_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_delete_gateway_security_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gateway_security_policy.DeleteGatewaySecurityPolicyRequest.pb( + gateway_security_policy.DeleteGatewaySecurityPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gateway_security_policy.DeleteGatewaySecurityPolicyRequest() + metadata = [ + ("key", "val"), + 
("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_gateway_security_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_gateway_security_policy_rules_rest_bad_request( + request_type=gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_gateway_security_policy_rules(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest, + dict, + ], +) +def test_list_gateway_security_policy_rules_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_gateway_security_policy_rules(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListGatewaySecurityPolicyRulesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_gateway_security_policy_rules_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_list_gateway_security_policy_rules", + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_list_gateway_security_policy_rules_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "pre_list_gateway_security_policy_rules", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest.pb( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse.to_json( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + ) + ) + req.return_value.content = return_value + + request = gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse() + ) + post_with_metadata.return_value = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesResponse(), + metadata, + ) + + client.list_gateway_security_policy_rules( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_gateway_security_policy_rule_rest_bad_request( + request_type=gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3/rules/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_gateway_security_policy_rule(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest, + dict, + ], +) +def test_get_gateway_security_policy_rule_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3/rules/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule( + name="name_value", + enabled=True, + priority=898, + description="description_value", + session_matcher="session_matcher_value", + application_matcher="application_matcher_value", + tls_inspection_enabled=True, + basic_profile=gateway_security_policy_rule.GatewaySecurityPolicyRule.BasicProfile.ALLOW, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_gateway_security_policy_rule(request) + + # Establish that the response is the type that we expect. 
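+ # The designated return_value above is converted to its protobuf form,
+ # JSON-encoded, and served as the fake HTTP payload; the assertions below
+ # then confirm the client decoded it back into a GatewaySecurityPolicyRule
+ # with the expected scalar fields. Fields that are set in return_value but
+ # not asserted (basic_profile here) are simply left unchecked by this test.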
+ assert isinstance(response, gateway_security_policy_rule.GatewaySecurityPolicyRule) + assert response.name == "name_value" + assert response.enabled is True + assert response.priority == 898 + assert response.description == "description_value" + assert response.session_matcher == "session_matcher_value" + assert response.application_matcher == "application_matcher_value" + assert response.tls_inspection_enabled is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_gateway_security_policy_rule_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_get_gateway_security_policy_rule", + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_get_gateway_security_policy_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "pre_get_gateway_security_policy_rule", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = ( + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest.pb( + gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule.to_json( + gateway_security_policy_rule.GatewaySecurityPolicyRule() + ) + req.return_value.content = return_value + + request = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gateway_security_policy_rule.GatewaySecurityPolicyRule() + post_with_metadata.return_value = ( + gateway_security_policy_rule.GatewaySecurityPolicyRule(), + metadata, + ) + + client.get_gateway_security_policy_rule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_gateway_security_policy_rule_rest_bad_request( + request_type=gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_gateway_security_policy_rule(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest, + dict, + ], +) +def test_create_gateway_security_policy_rule_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3" + } + request_init["gateway_security_policy_rule"] = { + "basic_profile": 1, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "enabled": True, + "priority": 898, + "description": "description_value", + "session_matcher": "session_matcher_value", + "application_matcher": "application_matcher_value", + "tls_inspection_enabled": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest.meta.fields[ + "gateway_security_policy_rule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
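+ # Proto-plus message classes expose their fields through `meta.fields`,
+ # whereas vanilla protobuf classes expose a `DESCRIPTOR`; the hasattr check
+ # below uses that difference to walk nested fields of either kind.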
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "gateway_security_policy_rule" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["gateway_security_policy_rule"][field]) + ): + del request_init["gateway_security_policy_rule"][field][i][subfield] + else: + del request_init["gateway_security_policy_rule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_gateway_security_policy_rule(request) + + # Establish that the response is the type that we expect. 
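+ # create_gateway_security_policy_rule is a long-running operation, so the
+ # fake payload is an operations_pb2.Operation; the generated check below only
+ # re-serializes the expected Operation via MessageToJson and does not assert
+ # on individual Operation fields. The interceptor variant of this test that
+ # follows is parametrized over null_interceptor, mocks the pre_*/post_*/
+ # post_*_with_metadata hooks with rewritten request, metadata, and response
+ # values, and asserts each hook is invoked exactly once.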
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_gateway_security_policy_rule_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_create_gateway_security_policy_rule", + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_create_gateway_security_policy_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "pre_create_gateway_security_policy_rule", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest.pb( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = ( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_gateway_security_policy_rule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_gateway_security_policy_rule_rest_bad_request( + request_type=gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "gateway_security_policy_rule": { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3/rules/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_gateway_security_policy_rule(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest, + dict, + ], +) +def test_update_gateway_security_policy_rule_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "gateway_security_policy_rule": { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3/rules/sample4" + } + } + request_init["gateway_security_policy_rule"] = { + "basic_profile": 1, + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3/rules/sample4", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "enabled": True, + "priority": 898, + "description": "description_value", + "session_matcher": "session_matcher_value", + "application_matcher": "application_matcher_value", + "tls_inspection_enabled": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest.meta.fields[ + "gateway_security_policy_rule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "gateway_security_policy_rule" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["gateway_security_policy_rule"][field]) + ): + del request_init["gateway_security_policy_rule"][field][i][subfield] + else: + del request_init["gateway_security_policy_rule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_gateway_security_policy_rule(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_gateway_security_policy_rule_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_update_gateway_security_policy_rule", + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_update_gateway_security_policy_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "pre_update_gateway_security_policy_rule", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest.pb( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = ( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_gateway_security_policy_rule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_gateway_security_policy_rule_rest_bad_request( + request_type=gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3/rules/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_gateway_security_policy_rule(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest, + dict, + ], +) +def test_delete_gateway_security_policy_rule_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/gatewaySecurityPolicies/sample3/rules/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_gateway_security_policy_rule(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_gateway_security_policy_rule_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_delete_gateway_security_policy_rule", + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_delete_gateway_security_policy_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "pre_delete_gateway_security_policy_rule", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = ( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest.pb( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_gateway_security_policy_rule( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_url_lists_rest_bad_request(request_type=url_list.ListUrlListsRequest): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_url_lists(request) + + +@pytest.mark.parametrize( + "request_type", + [ + url_list.ListUrlListsRequest, + dict, + ], +) +def test_list_url_lists_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = url_list.ListUrlListsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = url_list.ListUrlListsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_url_lists(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListUrlListsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_url_lists_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_list_url_lists" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_list_url_lists_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_list_url_lists" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = url_list.ListUrlListsRequest.pb(url_list.ListUrlListsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = url_list.ListUrlListsResponse.to_json( + url_list.ListUrlListsResponse() + ) + req.return_value.content = return_value + + request = url_list.ListUrlListsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = url_list.ListUrlListsResponse() + post_with_metadata.return_value = url_list.ListUrlListsResponse(), metadata + + client.list_url_lists( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_url_list_rest_bad_request(request_type=url_list.GetUrlListRequest): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/urlLists/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_url_list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + url_list.GetUrlListRequest, + dict, + ], +) +def test_get_url_list_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/urlLists/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
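+ # Success-path pattern: patch the transport session, JSON-encode the expected
+ # UrlList with json_format.MessageToJson, and attach it as response.content
+ # alongside a 200 status so the REST transport can parse it back into a
+ # url_list.UrlList.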
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = url_list.UrlList( + name="name_value", + description="description_value", + values=["values_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = url_list.UrlList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_url_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, url_list.UrlList) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.values == ["values_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_url_list_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_get_url_list" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_get_url_list_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_get_url_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = url_list.GetUrlListRequest.pb(url_list.GetUrlListRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = url_list.UrlList.to_json(url_list.UrlList()) + req.return_value.content = return_value + + request = url_list.GetUrlListRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = url_list.UrlList() + post_with_metadata.return_value = url_list.UrlList(), metadata + + client.get_url_list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_url_list_rest_bad_request( + request_type=gcn_url_list.CreateUrlListRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_url_list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_url_list.CreateUrlListRequest, + dict, + ], +) +def test_create_url_list_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["url_list"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "values": ["values_value1", "values_value2"], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_url_list.CreateUrlListRequest.meta.fields["url_list"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_list"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i 
in range(0, len(request_init["url_list"][field])): + del request_init["url_list"][field][i][subfield] + else: + del request_init["url_list"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_url_list(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_url_list_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_create_url_list" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_create_url_list_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_create_url_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_url_list.CreateUrlListRequest.pb( + gcn_url_list.CreateUrlListRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_url_list.CreateUrlListRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_url_list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_url_list_rest_bad_request( + request_type=gcn_url_list.UpdateUrlListRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "url_list": {"name": "projects/sample1/locations/sample2/urlLists/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_url_list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_url_list.UpdateUrlListRequest, + dict, + ], +) +def test_update_url_list_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "url_list": {"name": "projects/sample1/locations/sample2/urlLists/sample3"} + } + request_init["url_list"] = { + "name": "projects/sample1/locations/sample2/urlLists/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "values": ["values_value1", "values_value2"], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_url_list.UpdateUrlListRequest.meta.fields["url_list"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["url_list"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["url_list"][field])): + del request_init["url_list"][field][i][subfield] + else: + del request_init["url_list"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_url_list(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_url_list_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_update_url_list" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_update_url_list_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_update_url_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_url_list.UpdateUrlListRequest.pb( + gcn_url_list.UpdateUrlListRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_url_list.UpdateUrlListRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_url_list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_url_list_rest_bad_request(request_type=url_list.DeleteUrlListRequest): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/urlLists/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_url_list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + url_list.DeleteUrlListRequest, + dict, + ], +) +def test_delete_url_list_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/urlLists/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_url_list(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_url_list_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_delete_url_list" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_delete_url_list_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_delete_url_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = url_list.DeleteUrlListRequest.pb(url_list.DeleteUrlListRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = url_list.DeleteUrlListRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_url_list( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_tls_inspection_policies_rest_bad_request( + request_type=tls_inspection_policy.ListTlsInspectionPoliciesRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_tls_inspection_policies(request) + + +@pytest.mark.parametrize( + "request_type", + [ + tls_inspection_policy.ListTlsInspectionPoliciesRequest, + dict, + ], +) +def test_list_tls_inspection_policies_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_tls_inspection_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTlsInspectionPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_tls_inspection_policies_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_list_tls_inspection_policies" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_list_tls_inspection_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_list_tls_inspection_policies" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = tls_inspection_policy.ListTlsInspectionPoliciesRequest.pb( + tls_inspection_policy.ListTlsInspectionPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse.to_json( + tls_inspection_policy.ListTlsInspectionPoliciesResponse() + ) + req.return_value.content = return_value + + request = tls_inspection_policy.ListTlsInspectionPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tls_inspection_policy.ListTlsInspectionPoliciesResponse() + post_with_metadata.return_value = ( + tls_inspection_policy.ListTlsInspectionPoliciesResponse(), + metadata, + ) + + client.list_tls_inspection_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_tls_inspection_policy_rest_bad_request( + request_type=tls_inspection_policy.GetTlsInspectionPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/tlsInspectionPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_tls_inspection_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + tls_inspection_policy.GetTlsInspectionPolicyRequest, + dict, + ], +) +def test_get_tls_inspection_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/tlsInspectionPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tls_inspection_policy.TlsInspectionPolicy( + name="name_value", + description="description_value", + ca_pool="ca_pool_value", + trust_config="trust_config_value", + exclude_public_ca_set=True, + min_tls_version=tls_inspection_policy.TlsInspectionPolicy.TlsVersion.TLS_1_0, + tls_feature_profile=tls_inspection_policy.TlsInspectionPolicy.Profile.PROFILE_COMPATIBLE, + custom_tls_features=["custom_tls_features_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tls_inspection_policy.TlsInspectionPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_tls_inspection_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tls_inspection_policy.TlsInspectionPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.ca_pool == "ca_pool_value" + assert response.trust_config == "trust_config_value" + assert response.exclude_public_ca_set is True + assert ( + response.min_tls_version + == tls_inspection_policy.TlsInspectionPolicy.TlsVersion.TLS_1_0 + ) + assert ( + response.tls_feature_profile + == tls_inspection_policy.TlsInspectionPolicy.Profile.PROFILE_COMPATIBLE + ) + assert response.custom_tls_features == ["custom_tls_features_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_tls_inspection_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_get_tls_inspection_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_get_tls_inspection_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_get_tls_inspection_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = tls_inspection_policy.GetTlsInspectionPolicyRequest.pb( + tls_inspection_policy.GetTlsInspectionPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = tls_inspection_policy.TlsInspectionPolicy.to_json( + tls_inspection_policy.TlsInspectionPolicy() + ) + req.return_value.content = return_value + + request = tls_inspection_policy.GetTlsInspectionPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tls_inspection_policy.TlsInspectionPolicy() + post_with_metadata.return_value = ( + tls_inspection_policy.TlsInspectionPolicy(), + metadata, + ) + + client.get_tls_inspection_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_tls_inspection_policy_rest_bad_request( + request_type=gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_tls_inspection_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest, + dict, + ], +) +def test_create_tls_inspection_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["tls_inspection_policy"] = { + "name": "name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "ca_pool": "ca_pool_value", + "trust_config": "trust_config_value", + "exclude_public_ca_set": True, + "min_tls_version": 1, + "tls_feature_profile": 1, + "custom_tls_features": [ + "custom_tls_features_value1", + "custom_tls_features_value2", + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest.meta.fields[ + "tls_inspection_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "tls_inspection_policy" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["tls_inspection_policy"][field])): + del request_init["tls_inspection_policy"][field][i][subfield] + else: + del request_init["tls_inspection_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_tls_inspection_policy(request) + + # Establish that the response is the type that we expect. 
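+ # NOTE: create_tls_inspection_policy returns a long-running operation, so this
+ # generated test makes no assertions on `response`; the line below only
+ # re-serializes the mocked Operation payload.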
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_tls_inspection_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_create_tls_inspection_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_create_tls_inspection_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_create_tls_inspection_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest.pb( + gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_tls_inspection_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_tls_inspection_policy_rest_bad_request( + request_type=gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "tls_inspection_policy": { + "name": "projects/sample1/locations/sample2/tlsInspectionPolicies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_tls_inspection_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest, + dict, + ], +) +def test_update_tls_inspection_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "tls_inspection_policy": { + "name": "projects/sample1/locations/sample2/tlsInspectionPolicies/sample3" + } + } + request_init["tls_inspection_policy"] = { + "name": "projects/sample1/locations/sample2/tlsInspectionPolicies/sample3", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "ca_pool": "ca_pool_value", + "trust_config": "trust_config_value", + "exclude_public_ca_set": True, + "min_tls_version": 1, + "tls_feature_profile": 1, + "custom_tls_features": [ + "custom_tls_features_value1", + "custom_tls_features_value2", + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest.meta.fields[ + "tls_inspection_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "tls_inspection_policy" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["tls_inspection_policy"][field])): + del request_init["tls_inspection_policy"][field][i][subfield] + else: + del request_init["tls_inspection_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_tls_inspection_policy(request) + + # Establish that the response is the type that we expect. 
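+ # NOTE: update_tls_inspection_policy returns a long-running operation, so this
+ # generated test makes no assertions on `response`; the line below only
+ # re-serializes the mocked Operation payload.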
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_tls_inspection_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_update_tls_inspection_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_update_tls_inspection_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_update_tls_inspection_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest.pb( + gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_tls_inspection_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_tls_inspection_policy_rest_bad_request( + request_type=tls_inspection_policy.DeleteTlsInspectionPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/tlsInspectionPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_tls_inspection_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + tls_inspection_policy.DeleteTlsInspectionPolicyRequest, + dict, + ], +) +def test_delete_tls_inspection_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/tlsInspectionPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_tls_inspection_policy(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_tls_inspection_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_delete_tls_inspection_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_delete_tls_inspection_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_delete_tls_inspection_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = tls_inspection_policy.DeleteTlsInspectionPolicyRequest.pb( + tls_inspection_policy.DeleteTlsInspectionPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = tls_inspection_policy.DeleteTlsInspectionPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_tls_inspection_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_authz_policies_rest_bad_request( + request_type=authz_policy.ListAuthzPoliciesRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_authz_policies(request) + + +@pytest.mark.parametrize( + "request_type", + [ + authz_policy.ListAuthzPoliciesRequest, + dict, + ], +) +def test_list_authz_policies_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = authz_policy.ListAuthzPoliciesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = authz_policy.ListAuthzPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_authz_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAuthzPoliciesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_authz_policies_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_list_authz_policies" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_list_authz_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_list_authz_policies" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = authz_policy.ListAuthzPoliciesRequest.pb( + authz_policy.ListAuthzPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = authz_policy.ListAuthzPoliciesResponse.to_json( + authz_policy.ListAuthzPoliciesResponse() + ) + req.return_value.content = return_value + + request = authz_policy.ListAuthzPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = authz_policy.ListAuthzPoliciesResponse() + post_with_metadata.return_value = ( + authz_policy.ListAuthzPoliciesResponse(), + metadata, + ) + + client.list_authz_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_authz_policy_rest_bad_request( + request_type=authz_policy.GetAuthzPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/authzPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_authz_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + authz_policy.GetAuthzPolicyRequest, + dict, + ], +) +def test_get_authz_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/authzPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = authz_policy.AuthzPolicy( + name="name_value", + description="description_value", + action=authz_policy.AuthzPolicy.AuthzAction.ALLOW, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = authz_policy.AuthzPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_authz_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, authz_policy.AuthzPolicy) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.action == authz_policy.AuthzPolicy.AuthzAction.ALLOW + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_authz_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_get_authz_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_get_authz_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_get_authz_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = authz_policy.GetAuthzPolicyRequest.pb( + authz_policy.GetAuthzPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = authz_policy.AuthzPolicy.to_json(authz_policy.AuthzPolicy()) + req.return_value.content = return_value + + request = authz_policy.GetAuthzPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = authz_policy.AuthzPolicy() + post_with_metadata.return_value = authz_policy.AuthzPolicy(), metadata + + client.get_authz_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_authz_policy_rest_bad_request( + request_type=gcn_authz_policy.CreateAuthzPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_authz_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_authz_policy.CreateAuthzPolicyRequest, + dict, + ], +) +def test_create_authz_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["authz_policy"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "labels": {}, + "target": { + "load_balancing_scheme": 1, + "resources": ["resources_value1", "resources_value2"], + }, + "http_rules": [ + { + "from_": { + "sources": [ + { + "principals": [ + { + "principal_selector": 1, + "principal": { + "exact": "exact_value", + "prefix": "prefix_value", + "suffix": "suffix_value", + "contains": "contains_value", + "ignore_case": True, + }, + } + ], + "ip_blocks": [{"prefix": "prefix_value", "length": 642}], + "resources": [ + { + "tag_value_id_set": {"ids": [321, 322]}, + "iam_service_account": {}, + } + ], + } + ], + "not_sources": {}, + }, + "to": { + "operations": [ + { + "header_set": { + "headers": [{"name": "name_value", "value": {}}] + }, + "hosts": {}, + "paths": {}, + "methods": ["methods_value1", "methods_value2"], + } + ], + "not_operations": {}, + }, + "when": "when_value", + } + ], + "action": 1, + "custom_provider": { + "cloud_iap": {}, + "authz_extension": {"resources": ["resources_value1", "resources_value2"]}, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_authz_policy.CreateAuthzPolicyRequest.meta.fields["authz_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["authz_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["authz_policy"][field])): + del request_init["authz_policy"][field][i][subfield] + else: + del request_init["authz_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_authz_policy(request) + + # Establish that the response is the type that we expect. 
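+ # NOTE: create_authz_policy returns a long-running operation, so this generated
+ # test makes no assertions on `response`; the line below only re-serializes the
+ # mocked Operation payload.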
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_authz_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_create_authz_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_create_authz_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_create_authz_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_authz_policy.CreateAuthzPolicyRequest.pb( + gcn_authz_policy.CreateAuthzPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_authz_policy.CreateAuthzPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_authz_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_authz_policy_rest_bad_request( + request_type=gcn_authz_policy.UpdateAuthzPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "authz_policy": { + "name": "projects/sample1/locations/sample2/authzPolicies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_authz_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_authz_policy.UpdateAuthzPolicyRequest, + dict, + ], +) +def test_update_authz_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "authz_policy": { + "name": "projects/sample1/locations/sample2/authzPolicies/sample3" + } + } + request_init["authz_policy"] = { + "name": "projects/sample1/locations/sample2/authzPolicies/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "labels": {}, + "target": { + "load_balancing_scheme": 1, + "resources": ["resources_value1", "resources_value2"], + }, + "http_rules": [ + { + "from_": { + "sources": [ + { + "principals": [ + { + "principal_selector": 1, + "principal": { + "exact": "exact_value", + "prefix": "prefix_value", + "suffix": "suffix_value", + "contains": "contains_value", + "ignore_case": True, + }, + } + ], + "ip_blocks": [{"prefix": "prefix_value", "length": 642}], + "resources": [ + { + "tag_value_id_set": {"ids": [321, 322]}, + "iam_service_account": {}, + } + ], + } + ], + "not_sources": {}, + }, + "to": { + "operations": [ + { + "header_set": { + "headers": [{"name": "name_value", "value": {}}] + }, + "hosts": {}, + "paths": {}, + "methods": ["methods_value1", "methods_value2"], + } + ], + "not_operations": {}, + }, + "when": "when_value", + } + ], + "action": 1, + "custom_provider": { + "cloud_iap": {}, + "authz_extension": {"resources": ["resources_value1", "resources_value2"]}, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_authz_policy.UpdateAuthzPolicyRequest.meta.fields["authz_policy"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["authz_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["authz_policy"][field])): + del request_init["authz_policy"][field][i][subfield] + else: + del request_init["authz_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_authz_policy(request) + + # Establish that the response is the type that we expect. 
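+ # NOTE: update_authz_policy returns a long-running operation, so this generated
+ # test makes no assertions on `response`; the line below only re-serializes the
+ # mocked Operation payload.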
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_authz_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_update_authz_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_update_authz_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_update_authz_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_authz_policy.UpdateAuthzPolicyRequest.pb( + gcn_authz_policy.UpdateAuthzPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = gcn_authz_policy.UpdateAuthzPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_authz_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_authz_policy_rest_bad_request( + request_type=authz_policy.DeleteAuthzPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/authzPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_authz_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + authz_policy.DeleteAuthzPolicyRequest, + dict, + ], +) +def test_delete_authz_policy_rest_call_success(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/authzPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_authz_policy(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_authz_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkSecurityRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkSecurityRestInterceptor(), + ) + client = NetworkSecurityClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NetworkSecurityRestInterceptor, "post_delete_authz_policy" + ) as post, mock.patch.object( + transports.NetworkSecurityRestInterceptor, + "post_delete_authz_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkSecurityRestInterceptor, "pre_delete_authz_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = authz_policy.DeleteAuthzPolicyRequest.pb( + authz_policy.DeleteAuthzPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = authz_policy.DeleteAuthzPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), 
metadata + + client.delete_authz_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj - response_value = mock.Mock() + response_value = Response() json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = mock.Mock() + response_value.request = Request() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_client_tls_policy(request) + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_authorization_policies_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_authorization_policies), "__call__" + ) as call: + client.list_authorization_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authorization_policy.ListAuthorizationPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_authorization_policy_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_authorization_policy), "__call__" + ) as call: + client.get_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authorization_policy.GetAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_authorization_policy_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_authorization_policy), "__call__" + ) as call: + client.create_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_authorization_policy.CreateAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_authorization_policy_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_authorization_policy), "__call__" + ) as call: + client.update_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_authorization_policy.UpdateAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_authorization_policy_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_authorization_policy), "__call__" + ) as call: + client.delete_authorization_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = authorization_policy.DeleteAuthorizationPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backend_authentication_configs_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backend_authentication_configs), "__call__" + ) as call: + client.list_backend_authentication_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backend_authentication_config.ListBackendAuthenticationConfigsRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_backend_authentication_config_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backend_authentication_config), "__call__" + ) as call: + client.get_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backend_authentication_config.GetBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backend_authentication_config_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backend_authentication_config), "__call__" + ) as call: + client.create_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_backend_authentication_config.CreateBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backend_authentication_config_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backend_authentication_config), "__call__" + ) as call: + client.update_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_backend_authentication_config.UpdateBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backend_authentication_config_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backend_authentication_config), "__call__" + ) as call: + client.delete_backend_authentication_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backend_authentication_config.DeleteBackendAuthenticationConfigRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_server_tls_policies_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_server_tls_policies), "__call__" + ) as call: + client.list_server_tls_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = server_tls_policy.ListServerTlsPoliciesRequest() + assert args[0] == request_msg -@pytest.mark.parametrize( - "request_type", - [ - gcn_client_tls_policy.UpdateClientTlsPolicyRequest, - dict, - ], -) -def test_update_client_tls_policy_rest_call_success(request_type): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_server_tls_policy_empty_call_rest(): client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "client_tls_policy": { - "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" - } - } - request_init["client_tls_policy"] = { - "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3", - "description": "description_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "sni": "sni_value", - "client_certificate": { - "local_filepath": { - "certificate_path": "certificate_path_value", - "private_key_path": "private_key_path_value", - }, - "grpc_endpoint": {"target_uri": "target_uri_value"}, - "certificate_provider_instance": { - "plugin_instance": "plugin_instance_value" - }, - }, - "server_validation_ca": [ - { - "ca_cert_path": "ca_cert_path_value", - "grpc_endpoint": {}, - "certificate_provider_instance": {}, - } - ], - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_server_tls_policy), "__call__" + ) as call: + client.get_server_tls_policy(request=None) - # Determine if the message type is proto-plus or protobuf - test_field = gcn_client_tls_policy.UpdateClientTlsPolicyRequest.meta.fields[ - "client_tls_policy" - ] + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = server_tls_policy.GetServerTlsPolicyRequest() - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + assert args[0] == request_msg - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_server_tls_policy_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_server_tls_policy), "__call__" + ) as call: + client.create_server_tls_policy(request=None) - subfields_not_in_runtime = [] + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_server_tls_policy.CreateServerTlsPolicyRequest() - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["client_tls_policy"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + assert args[0] == request_msg - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["client_tls_policy"][field])): - del request_init["client_tls_policy"][field][i][subfield] - else: - del request_init["client_tls_policy"][field][subfield] - request = request_type(**request_init) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_server_tls_policy_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_server_tls_policy), "__call__" + ) as call: + client.update_server_tls_policy(request=None) - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_client_tls_policy(request) + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_server_tls_policy.UpdateServerTlsPolicyRequest() - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert args[0] == request_msg -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_client_tls_policy_rest_interceptors(null_interceptor): - transport = transports.NetworkSecurityRestTransport( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_server_tls_policy_empty_call_rest(): + client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NetworkSecurityRestInterceptor(), + transport="rest", ) - client = NetworkSecurityClient(transport=transport) + # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetworkSecurityRestInterceptor, "post_update_client_tls_policy" - ) as post, mock.patch.object( - transports.NetworkSecurityRestInterceptor, - "post_update_client_tls_policy_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.NetworkSecurityRestInterceptor, "pre_update_client_tls_policy" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcn_client_tls_policy.UpdateClientTlsPolicyRequest.pb( - gcn_client_tls_policy.UpdateClientTlsPolicyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + type(client.transport.delete_server_tls_policy), "__call__" + ) as call: + client.delete_server_tls_policy(request=None) - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = server_tls_policy.DeleteServerTlsPolicyRequest() - request = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + assert args[0] == request_msg - client.update_client_tls_policy( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_client_tls_policies_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_client_tls_policies), "__call__" + ) as call: + client.list_client_tls_policies(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = client_tls_policy.ListClientTlsPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_client_tls_policy_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_client_tls_policy), "__call__" + ) as call: + client.get_client_tls_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = client_tls_policy.GetClientTlsPolicyRequest() + + assert args[0] == request_msg -def test_delete_client_tls_policy_rest_bad_request( - request_type=client_tls_policy.DeleteClientTlsPolicyRequest, -): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_client_tls_policy_empty_call_rest(): client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" - } - request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_client_tls_policy(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_client_tls_policy), "__call__" + ) as call: + client.create_client_tls_policy(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_client_tls_policy.CreateClientTlsPolicyRequest() -@pytest.mark.parametrize( - "request_type", - [ - client_tls_policy.DeleteClientTlsPolicyRequest, - dict, - ], -) -def test_delete_client_tls_policy_rest_call_success(request_type): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_client_tls_policy_empty_call_rest(): client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clientTlsPolicies/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_client_tls_policy), "__call__" + ) as call: + client.update_client_tls_policy(request=None) - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_client_tls_policy(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert args[0] == request_msg -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_client_tls_policy_rest_interceptors(null_interceptor): - transport = transports.NetworkSecurityRestTransport( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_client_tls_policy_empty_call_rest(): + client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NetworkSecurityRestInterceptor(), + transport="rest", ) - client = NetworkSecurityClient(transport=transport) + # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.NetworkSecurityRestInterceptor, "post_delete_client_tls_policy" - ) as post, mock.patch.object( - transports.NetworkSecurityRestInterceptor, - "post_delete_client_tls_policy_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.NetworkSecurityRestInterceptor, "pre_delete_client_tls_policy" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = client_tls_policy.DeleteClientTlsPolicyRequest.pb( - client_tls_policy.DeleteClientTlsPolicyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = client_tls_policy.DeleteClientTlsPolicyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + type(client.transport.delete_client_tls_policy), "__call__" + ) as call: + client.delete_client_tls_policy(request=None) - client.delete_client_tls_policy( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = client_tls_policy.DeleteClientTlsPolicyRequest() - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + assert args[0] == request_msg -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_gateway_security_policies_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policies), "__call__" + ) as call: + client.list_gateway_security_policies(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy.ListGatewaySecurityPoliciesRequest() -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_gateway_security_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy), "__call__" + ) as call: + client.get_gateway_security_policy(request=None) - response = client.get_location(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy.GetGatewaySecurityPolicyRequest() - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.Location) + assert args[0] == request_msg -def test_list_locations_rest_bad_request( - request_type=locations_pb2.ListLocationsRequest, -): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_gateway_security_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy), "__call__" + ) as call: + client.create_gateway_security_policy(request=None) -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_gateway_security_policy.CreateGatewaySecurityPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_gateway_security_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy), "__call__" + ) as call: + client.update_gateway_security_policy(request=None) - response = client.list_locations(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_gateway_security_policy.UpdateGatewaySecurityPolicyRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + assert args[0] == request_msg -def test_get_iam_policy_rest_bad_request( - request_type=iam_policy_pb2.GetIamPolicyRequest, -): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_gateway_security_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict( - { - "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" - }, - request, - ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy), "__call__" + ) as call: + client.delete_gateway_security_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy.DeleteGatewaySecurityPolicyRequest() + assert args[0] == request_msg -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.GetIamPolicyRequest, - dict, - ], -) -def test_get_iam_policy_rest(request_type): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_gateway_security_policy_rules_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = { - "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" - } - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_gateway_security_policy_rules), "__call__" + ) as call: + client.list_gateway_security_policy_rules(request=None) - response = client.get_iam_policy(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gateway_security_policy_rule.ListGatewaySecurityPolicyRulesRequest() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) + assert args[0] == request_msg -def test_set_iam_policy_rest_bad_request( - request_type=iam_policy_pb2.SetIamPolicyRequest, -): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_gateway_security_policy_rule_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict( - { - "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" - }, - request, - ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_gateway_security_policy_rule), "__call__" + ) as call: + client.get_gateway_security_policy_rule(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gateway_security_policy_rule.GetGatewaySecurityPolicyRuleRequest() -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.SetIamPolicyRequest, - dict, - ], -) -def test_set_iam_policy_rest(request_type): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_gateway_security_policy_rule_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = { - "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" - } - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_gateway_security_policy_rule), "__call__" + ) as call: + client.create_gateway_security_policy_rule(request=None) - response = client.set_iam_policy(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_gateway_security_policy_rule.CreateGatewaySecurityPolicyRuleRequest() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) + assert args[0] == request_msg -def test_test_iam_permissions_rest_bad_request( - request_type=iam_policy_pb2.TestIamPermissionsRequest, -): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_gateway_security_policy_rule_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict( - { - "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" - }, - request, - ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_gateway_security_policy_rule), "__call__" + ) as call: + client.update_gateway_security_policy_rule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gcn_gateway_security_policy_rule.UpdateGatewaySecurityPolicyRuleRequest() + ) + assert args[0] == request_msg -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, - ], -) -def test_test_iam_permissions_rest(request_type): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_gateway_security_policy_rule_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = { - "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" - } - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_gateway_security_policy_rule), "__call__" + ) as call: + client.delete_gateway_security_policy_rule(request=None) - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gateway_security_policy_rule.DeleteGatewaySecurityPolicyRuleRequest() + ) - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + assert args[0] == request_msg - response = client.test_iam_permissions(request) - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_url_lists_empty_call_rest(): + client = NetworkSecurityClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.list_url_lists), "__call__") as call: + client.list_url_lists(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = url_list.ListUrlListsRequest() + + assert args[0] == request_msg -def test_cancel_operation_rest_bad_request( - request_type=operations_pb2.CancelOperationRequest, -): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_url_list_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_url_list), "__call__") as call: + client.get_url_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = url_list.GetUrlListRequest() + assert args[0] == request_msg -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_url_list_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "{}" - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_url_list), "__call__") as call: + client.create_url_list(request=None) - response = client.cancel_operation(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_url_list.CreateUrlListRequest() - # Establish that the response is the type that we expect. - assert response is None + assert args[0] == request_msg -def test_delete_operation_rest_bad_request( - request_type=operations_pb2.DeleteOperationRequest, -): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_url_list_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_url_list), "__call__") as call: + client.update_url_list(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_url_list.UpdateUrlListRequest() -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_url_list_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = "{}" - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_url_list), "__call__") as call: + client.delete_url_list(request=None) - response = client.delete_operation(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = url_list.DeleteUrlListRequest() - # Establish that the response is the type that we expect. - assert response is None + assert args[0] == request_msg -def test_get_operation_rest_bad_request( - request_type=operations_pb2.GetOperationRequest, -): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_tls_inspection_policies_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_tls_inspection_policies), "__call__" + ) as call: + client.list_tls_inspection_policies(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tls_inspection_policy.ListTlsInspectionPoliciesRequest() -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_tls_inspection_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_tls_inspection_policy), "__call__" + ) as call: + client.get_tls_inspection_policy(request=None) - response = client.get_operation(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tls_inspection_policy.GetTlsInspectionPolicyRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + assert args[0] == request_msg -def test_list_operations_rest_bad_request( - request_type=operations_pb2.ListOperationsRequest, -): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_tls_inspection_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_tls_inspection_policy), "__call__" + ) as call: + client.create_tls_inspection_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_tls_inspection_policy.CreateTlsInspectionPolicyRequest() + assert args[0] == request_msg -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_tls_inspection_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_tls_inspection_policy), "__call__" + ) as call: + client.update_tls_inspection_policy(request=None) - response = client.list_operations(request) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_tls_inspection_policy.UpdateTlsInspectionPolicyRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + assert args[0] == request_msg -def test_initialize_client_w_rest(): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_tls_inspection_policy_empty_call_rest(): client = NetworkSecurityClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert client is not None + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_tls_inspection_policy), "__call__" + ) as call: + client.delete_tls_inspection_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = tls_inspection_policy.DeleteTlsInspectionPolicyRequest() + + assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_list_client_tls_policies_empty_call_rest(): +def test_list_authz_policies_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5967,43 +40104,41 @@ def test_list_client_tls_policies_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_client_tls_policies), "__call__" + type(client.transport.list_authz_policies), "__call__" ) as call: - client.list_client_tls_policies(request=None) + client.list_authz_policies(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = client_tls_policy.ListClientTlsPoliciesRequest() + request_msg = authz_policy.ListAuthzPoliciesRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_get_client_tls_policy_empty_call_rest(): +def test_get_authz_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_client_tls_policy), "__call__" - ) as call: - client.get_client_tls_policy(request=None) + with mock.patch.object(type(client.transport.get_authz_policy), "__call__") as call: + client.get_authz_policy(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = client_tls_policy.GetClientTlsPolicyRequest() + request_msg = authz_policy.GetAuthzPolicyRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_create_client_tls_policy_empty_call_rest(): +def test_create_authz_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6011,21 +40146,21 @@ def test_create_client_tls_policy_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_client_tls_policy), "__call__" + type(client.transport.create_authz_policy), "__call__" ) as call: - client.create_client_tls_policy(request=None) + client.create_authz_policy(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = gcn_client_tls_policy.CreateClientTlsPolicyRequest() + request_msg = gcn_authz_policy.CreateAuthzPolicyRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_update_client_tls_policy_empty_call_rest(): +def test_update_authz_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6033,21 +40168,21 @@ def test_update_client_tls_policy_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.update_client_tls_policy), "__call__" + type(client.transport.update_authz_policy), "__call__" ) as call: - client.update_client_tls_policy(request=None) + client.update_authz_policy(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = gcn_client_tls_policy.UpdateClientTlsPolicyRequest() + request_msg = gcn_authz_policy.UpdateAuthzPolicyRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_delete_client_tls_policy_empty_call_rest(): +def test_delete_authz_policy_empty_call_rest(): client = NetworkSecurityClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6055,14 +40190,14 @@ def test_delete_client_tls_policy_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_client_tls_policy), "__call__" + type(client.transport.delete_authz_policy), "__call__" ) as call: - client.delete_client_tls_policy(request=None) + client.delete_authz_policy(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = client_tls_policy.DeleteClientTlsPolicyRequest() + request_msg = authz_policy.DeleteAuthzPolicyRequest() assert args[0] == request_msg @@ -6117,11 +40252,51 @@ def test_network_security_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. methods = ( + "list_authorization_policies", + "get_authorization_policy", + "create_authorization_policy", + "update_authorization_policy", + "delete_authorization_policy", + "list_backend_authentication_configs", + "get_backend_authentication_config", + "create_backend_authentication_config", + "update_backend_authentication_config", + "delete_backend_authentication_config", + "list_server_tls_policies", + "get_server_tls_policy", + "create_server_tls_policy", + "update_server_tls_policy", + "delete_server_tls_policy", "list_client_tls_policies", "get_client_tls_policy", "create_client_tls_policy", "update_client_tls_policy", "delete_client_tls_policy", + "list_gateway_security_policies", + "get_gateway_security_policy", + "create_gateway_security_policy", + "update_gateway_security_policy", + "delete_gateway_security_policy", + "list_gateway_security_policy_rules", + "get_gateway_security_policy_rule", + "create_gateway_security_policy_rule", + "update_gateway_security_policy_rule", + "delete_gateway_security_policy_rule", + "list_url_lists", + "get_url_list", + "create_url_list", + "update_url_list", + "delete_url_list", + "list_tls_inspection_policies", + "get_tls_inspection_policy", + "create_tls_inspection_policy", + "update_tls_inspection_policy", + "delete_tls_inspection_policy", + "list_authz_policies", + "get_authz_policy", + "create_authz_policy", + "update_authz_policy", + "delete_authz_policy", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -6394,6 +40569,51 @@ def test_network_security_client_transport_session_collision(transport_name): credentials=creds2, transport=transport_name, ) + session1 = client1.transport.list_authorization_policies._session + session2 = client2.transport.list_authorization_policies._session + assert session1 != session2 + session1 = client1.transport.get_authorization_policy._session + session2 = 
client2.transport.get_authorization_policy._session + assert session1 != session2 + session1 = client1.transport.create_authorization_policy._session + session2 = client2.transport.create_authorization_policy._session + assert session1 != session2 + session1 = client1.transport.update_authorization_policy._session + session2 = client2.transport.update_authorization_policy._session + assert session1 != session2 + session1 = client1.transport.delete_authorization_policy._session + session2 = client2.transport.delete_authorization_policy._session + assert session1 != session2 + session1 = client1.transport.list_backend_authentication_configs._session + session2 = client2.transport.list_backend_authentication_configs._session + assert session1 != session2 + session1 = client1.transport.get_backend_authentication_config._session + session2 = client2.transport.get_backend_authentication_config._session + assert session1 != session2 + session1 = client1.transport.create_backend_authentication_config._session + session2 = client2.transport.create_backend_authentication_config._session + assert session1 != session2 + session1 = client1.transport.update_backend_authentication_config._session + session2 = client2.transport.update_backend_authentication_config._session + assert session1 != session2 + session1 = client1.transport.delete_backend_authentication_config._session + session2 = client2.transport.delete_backend_authentication_config._session + assert session1 != session2 + session1 = client1.transport.list_server_tls_policies._session + session2 = client2.transport.list_server_tls_policies._session + assert session1 != session2 + session1 = client1.transport.get_server_tls_policy._session + session2 = client2.transport.get_server_tls_policy._session + assert session1 != session2 + session1 = client1.transport.create_server_tls_policy._session + session2 = client2.transport.create_server_tls_policy._session + assert session1 != session2 + session1 = client1.transport.update_server_tls_policy._session + session2 = client2.transport.update_server_tls_policy._session + assert session1 != session2 + session1 = client1.transport.delete_server_tls_policy._session + session2 = client2.transport.delete_server_tls_policy._session + assert session1 != session2 session1 = client1.transport.list_client_tls_policies._session session2 = client2.transport.list_client_tls_policies._session assert session1 != session2 @@ -6409,6 +40629,81 @@ def test_network_security_client_transport_session_collision(transport_name): session1 = client1.transport.delete_client_tls_policy._session session2 = client2.transport.delete_client_tls_policy._session assert session1 != session2 + session1 = client1.transport.list_gateway_security_policies._session + session2 = client2.transport.list_gateway_security_policies._session + assert session1 != session2 + session1 = client1.transport.get_gateway_security_policy._session + session2 = client2.transport.get_gateway_security_policy._session + assert session1 != session2 + session1 = client1.transport.create_gateway_security_policy._session + session2 = client2.transport.create_gateway_security_policy._session + assert session1 != session2 + session1 = client1.transport.update_gateway_security_policy._session + session2 = client2.transport.update_gateway_security_policy._session + assert session1 != session2 + session1 = client1.transport.delete_gateway_security_policy._session + session2 = client2.transport.delete_gateway_security_policy._session + assert session1 != session2 + 
session1 = client1.transport.list_gateway_security_policy_rules._session + session2 = client2.transport.list_gateway_security_policy_rules._session + assert session1 != session2 + session1 = client1.transport.get_gateway_security_policy_rule._session + session2 = client2.transport.get_gateway_security_policy_rule._session + assert session1 != session2 + session1 = client1.transport.create_gateway_security_policy_rule._session + session2 = client2.transport.create_gateway_security_policy_rule._session + assert session1 != session2 + session1 = client1.transport.update_gateway_security_policy_rule._session + session2 = client2.transport.update_gateway_security_policy_rule._session + assert session1 != session2 + session1 = client1.transport.delete_gateway_security_policy_rule._session + session2 = client2.transport.delete_gateway_security_policy_rule._session + assert session1 != session2 + session1 = client1.transport.list_url_lists._session + session2 = client2.transport.list_url_lists._session + assert session1 != session2 + session1 = client1.transport.get_url_list._session + session2 = client2.transport.get_url_list._session + assert session1 != session2 + session1 = client1.transport.create_url_list._session + session2 = client2.transport.create_url_list._session + assert session1 != session2 + session1 = client1.transport.update_url_list._session + session2 = client2.transport.update_url_list._session + assert session1 != session2 + session1 = client1.transport.delete_url_list._session + session2 = client2.transport.delete_url_list._session + assert session1 != session2 + session1 = client1.transport.list_tls_inspection_policies._session + session2 = client2.transport.list_tls_inspection_policies._session + assert session1 != session2 + session1 = client1.transport.get_tls_inspection_policy._session + session2 = client2.transport.get_tls_inspection_policy._session + assert session1 != session2 + session1 = client1.transport.create_tls_inspection_policy._session + session2 = client2.transport.create_tls_inspection_policy._session + assert session1 != session2 + session1 = client1.transport.update_tls_inspection_policy._session + session2 = client2.transport.update_tls_inspection_policy._session + assert session1 != session2 + session1 = client1.transport.delete_tls_inspection_policy._session + session2 = client2.transport.delete_tls_inspection_policy._session + assert session1 != session2 + session1 = client1.transport.list_authz_policies._session + session2 = client2.transport.list_authz_policies._session + assert session1 != session2 + session1 = client1.transport.get_authz_policy._session + session2 = client2.transport.get_authz_policy._session + assert session1 != session2 + session1 = client1.transport.create_authz_policy._session + session2 = client2.transport.create_authz_policy._session + assert session1 != session2 + session1 = client1.transport.update_authz_policy._session + session2 = client2.transport.update_authz_policy._session + assert session1 != session2 + session1 = client1.transport.delete_authz_policy._session + session2 = client2.transport.delete_authz_policy._session + assert session1 != session2 def test_network_security_grpc_transport_channel(): @@ -6571,10 +40866,148 @@ def test_network_security_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_client_tls_policy_path(): +def test_authorization_policy_path(): + project = "squid" + location = "clam" + authorization_policy = "whelk" + expected = 
"projects/{project}/locations/{location}/authorizationPolicies/{authorization_policy}".format( + project=project, + location=location, + authorization_policy=authorization_policy, + ) + actual = NetworkSecurityClient.authorization_policy_path( + project, location, authorization_policy + ) + assert expected == actual + + +def test_parse_authorization_policy_path(): + expected = { + "project": "octopus", + "location": "oyster", + "authorization_policy": "nudibranch", + } + path = NetworkSecurityClient.authorization_policy_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkSecurityClient.parse_authorization_policy_path(path) + assert expected == actual + + +def test_authz_policy_path(): + project = "cuttlefish" + location = "mussel" + authz_policy = "winkle" + expected = ( + "projects/{project}/locations/{location}/authzPolicies/{authz_policy}".format( + project=project, + location=location, + authz_policy=authz_policy, + ) + ) + actual = NetworkSecurityClient.authz_policy_path(project, location, authz_policy) + assert expected == actual + + +def test_parse_authz_policy_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "authz_policy": "abalone", + } + path = NetworkSecurityClient.authz_policy_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkSecurityClient.parse_authz_policy_path(path) + assert expected == actual + + +def test_backend_authentication_config_path(): + project = "squid" + location = "clam" + backend_authentication_config = "whelk" + expected = "projects/{project}/locations/{location}/backendAuthenticationConfigs/{backend_authentication_config}".format( + project=project, + location=location, + backend_authentication_config=backend_authentication_config, + ) + actual = NetworkSecurityClient.backend_authentication_config_path( + project, location, backend_authentication_config + ) + assert expected == actual + + +def test_parse_backend_authentication_config_path(): + expected = { + "project": "octopus", + "location": "oyster", + "backend_authentication_config": "nudibranch", + } + path = NetworkSecurityClient.backend_authentication_config_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkSecurityClient.parse_backend_authentication_config_path(path) + assert expected == actual + + +def test_ca_pool_path(): + project = "cuttlefish" + location = "mussel" + ca_pool = "winkle" + expected = "projects/{project}/locations/{location}/caPools/{ca_pool}".format( + project=project, + location=location, + ca_pool=ca_pool, + ) + actual = NetworkSecurityClient.ca_pool_path(project, location, ca_pool) + assert expected == actual + + +def test_parse_ca_pool_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "ca_pool": "abalone", + } + path = NetworkSecurityClient.ca_pool_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworkSecurityClient.parse_ca_pool_path(path) + assert expected == actual + + +def test_certificate_path(): project = "squid" location = "clam" - client_tls_policy = "whelk" + certificate = "whelk" + expected = ( + "projects/{project}/locations/{location}/certificates/{certificate}".format( + project=project, + location=location, + certificate=certificate, + ) + ) + actual = NetworkSecurityClient.certificate_path(project, location, certificate) + assert expected == actual + + +def test_parse_certificate_path(): + expected = { + "project": "octopus", + "location": "oyster", + "certificate": "nudibranch", + } + path = NetworkSecurityClient.certificate_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkSecurityClient.parse_certificate_path(path) + assert expected == actual + + +def test_client_tls_policy_path(): + project = "cuttlefish" + location = "mussel" + client_tls_policy = "winkle" expected = "projects/{project}/locations/{location}/clientTlsPolicies/{client_tls_policy}".format( project=project, location=location, @@ -6588,9 +41021,9 @@ def test_client_tls_policy_path(): def test_parse_client_tls_policy_path(): expected = { - "project": "octopus", - "location": "oyster", - "client_tls_policy": "nudibranch", + "project": "nautilus", + "location": "scallop", + "client_tls_policy": "abalone", } path = NetworkSecurityClient.client_tls_policy_path(**expected) @@ -6599,8 +41032,177 @@ def test_parse_client_tls_policy_path(): assert expected == actual +def test_gateway_security_policy_path(): + project = "squid" + location = "clam" + gateway_security_policy = "whelk" + expected = "projects/{project}/locations/{location}/gatewaySecurityPolicies/{gateway_security_policy}".format( + project=project, + location=location, + gateway_security_policy=gateway_security_policy, + ) + actual = NetworkSecurityClient.gateway_security_policy_path( + project, location, gateway_security_policy + ) + assert expected == actual + + +def test_parse_gateway_security_policy_path(): + expected = { + "project": "octopus", + "location": "oyster", + "gateway_security_policy": "nudibranch", + } + path = NetworkSecurityClient.gateway_security_policy_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkSecurityClient.parse_gateway_security_policy_path(path) + assert expected == actual + + +def test_gateway_security_policy_rule_path(): + project = "cuttlefish" + location = "mussel" + gateway_security_policy = "winkle" + rule = "nautilus" + expected = "projects/{project}/locations/{location}/gatewaySecurityPolicies/{gateway_security_policy}/rules/{rule}".format( + project=project, + location=location, + gateway_security_policy=gateway_security_policy, + rule=rule, + ) + actual = NetworkSecurityClient.gateway_security_policy_rule_path( + project, location, gateway_security_policy, rule + ) + assert expected == actual + + +def test_parse_gateway_security_policy_rule_path(): + expected = { + "project": "scallop", + "location": "abalone", + "gateway_security_policy": "squid", + "rule": "clam", + } + path = NetworkSecurityClient.gateway_security_policy_rule_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworkSecurityClient.parse_gateway_security_policy_rule_path(path) + assert expected == actual + + +def test_server_tls_policy_path(): + project = "whelk" + location = "octopus" + server_tls_policy = "oyster" + expected = "projects/{project}/locations/{location}/serverTlsPolicies/{server_tls_policy}".format( + project=project, + location=location, + server_tls_policy=server_tls_policy, + ) + actual = NetworkSecurityClient.server_tls_policy_path( + project, location, server_tls_policy + ) + assert expected == actual + + +def test_parse_server_tls_policy_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "server_tls_policy": "mussel", + } + path = NetworkSecurityClient.server_tls_policy_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkSecurityClient.parse_server_tls_policy_path(path) + assert expected == actual + + +def test_tls_inspection_policy_path(): + project = "winkle" + location = "nautilus" + tls_inspection_policy = "scallop" + expected = "projects/{project}/locations/{location}/tlsInspectionPolicies/{tls_inspection_policy}".format( + project=project, + location=location, + tls_inspection_policy=tls_inspection_policy, + ) + actual = NetworkSecurityClient.tls_inspection_policy_path( + project, location, tls_inspection_policy + ) + assert expected == actual + + +def test_parse_tls_inspection_policy_path(): + expected = { + "project": "abalone", + "location": "squid", + "tls_inspection_policy": "clam", + } + path = NetworkSecurityClient.tls_inspection_policy_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkSecurityClient.parse_tls_inspection_policy_path(path) + assert expected == actual + + +def test_trust_config_path(): + project = "whelk" + location = "octopus" + trust_config = "oyster" + expected = ( + "projects/{project}/locations/{location}/trustConfigs/{trust_config}".format( + project=project, + location=location, + trust_config=trust_config, + ) + ) + actual = NetworkSecurityClient.trust_config_path(project, location, trust_config) + assert expected == actual + + +def test_parse_trust_config_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "trust_config": "mussel", + } + path = NetworkSecurityClient.trust_config_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkSecurityClient.parse_trust_config_path(path) + assert expected == actual + + +def test_url_list_path(): + project = "winkle" + location = "nautilus" + url_list = "scallop" + expected = "projects/{project}/locations/{location}/urlLists/{url_list}".format( + project=project, + location=location, + url_list=url_list, + ) + actual = NetworkSecurityClient.url_list_path(project, location, url_list) + assert expected == actual + + +def test_parse_url_list_path(): + expected = { + "project": "abalone", + "location": "squid", + "url_list": "clam", + } + path = NetworkSecurityClient.url_list_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworkSecurityClient.parse_url_list_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6610,7 +41212,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "octopus", } path = NetworkSecurityClient.common_billing_account_path(**expected) @@ -6620,7 +41222,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -6630,7 +41232,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "nudibranch", } path = NetworkSecurityClient.common_folder_path(**expected) @@ -6640,7 +41242,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -6650,7 +41252,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "mussel", } path = NetworkSecurityClient.common_organization_path(**expected) @@ -6660,7 +41262,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -6670,7 +41272,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "nautilus", } path = NetworkSecurityClient.common_project_path(**expected) @@ -6680,8 +41282,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -6692,8 +41294,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "squid", + "location": "clam", } path = NetworkSecurityClient.common_location_path(**expected) diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_organization_security_profile_group_service.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_organization_security_profile_group_service.py new file mode 100644 index 000000000000..fa41486c441e --- /dev/null +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_organization_security_profile_group_service.py @@ -0,0 +1,12991 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service import ( + OrganizationSecurityProfileGroupServiceAsyncClient, + OrganizationSecurityProfileGroupServiceClient, + pagers, + transports, +) +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group_intercept, + security_profile_group_mirroring, + security_profile_group_service, + security_profile_group_threatprevention, + security_profile_group_urlfiltering, +) +from google.cloud.network_security_v1alpha1.types import ( + security_profile_group as gcn_security_profile_group, +) +from google.cloud.network_security_v1alpha1.types import common +from google.cloud.network_security_v1alpha1.types import security_profile_group + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ( + OrganizationSecurityProfileGroupServiceClient._get_default_mtls_endpoint(None) + is None + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_default_mtls_endpoint( + api_endpoint + ) + == api_mtls_endpoint + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_default_mtls_endpoint( + sandbox_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_default_mtls_endpoint( + non_googleapi + ) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert ( + OrganizationSecurityProfileGroupServiceClient._read_environment_variables() + == (False, "auto", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ( + OrganizationSecurityProfileGroupServiceClient._read_environment_variables() + == (True, "auto", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ( + OrganizationSecurityProfileGroupServiceClient._read_environment_variables() + == (False, "auto", None) + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + OrganizationSecurityProfileGroupServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ( + OrganizationSecurityProfileGroupServiceClient._read_environment_variables() + == (False, "never", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ( + OrganizationSecurityProfileGroupServiceClient._read_environment_variables() + == (False, "always", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ( + OrganizationSecurityProfileGroupServiceClient._read_environment_variables() + == (False, "auto", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + OrganizationSecurityProfileGroupServiceClient._read_environment_variables() + 
assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ( + OrganizationSecurityProfileGroupServiceClient._read_environment_variables() + == (False, "auto", "foo.com") + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + OrganizationSecurityProfileGroupServiceClient._get_client_cert_source( + None, False + ) + is None + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + OrganizationSecurityProfileGroupServiceClient._get_client_cert_source( + None, True + ) + is mock_default_cert_source + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + OrganizationSecurityProfileGroupServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationSecurityProfileGroupServiceClient), +) +@mock.patch.object( + OrganizationSecurityProfileGroupServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template( + OrganizationSecurityProfileGroupServiceAsyncClient + ), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = OrganizationSecurityProfileGroupServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + OrganizationSecurityProfileGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + OrganizationSecurityProfileGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + assert ( + OrganizationSecurityProfileGroupServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == OrganizationSecurityProfileGroupServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == OrganizationSecurityProfileGroupServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == OrganizationSecurityProfileGroupServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with 
pytest.raises(MutualTLSChannelError) as excinfo: + OrganizationSecurityProfileGroupServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + OrganizationSecurityProfileGroupServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + OrganizationSecurityProfileGroupServiceClient._get_universe_domain(None, None) + == OrganizationSecurityProfileGroupServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + OrganizationSecurityProfileGroupServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = OrganizationSecurityProfileGroupServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = OrganizationSecurityProfileGroupServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OrganizationSecurityProfileGroupServiceClient, "grpc"), + (OrganizationSecurityProfileGroupServiceAsyncClient, "grpc_asyncio"), + (OrganizationSecurityProfileGroupServiceClient, "rest"), + ], +) +def test_organization_security_profile_group_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.OrganizationSecurityProfileGroupServiceGrpcTransport, "grpc"), + ( + 
transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (transports.OrganizationSecurityProfileGroupServiceRestTransport, "rest"), + ], +) +def test_organization_security_profile_group_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OrganizationSecurityProfileGroupServiceClient, "grpc"), + (OrganizationSecurityProfileGroupServiceAsyncClient, "grpc_asyncio"), + (OrganizationSecurityProfileGroupServiceClient, "rest"), + ], +) +def test_organization_security_profile_group_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +def test_organization_security_profile_group_service_client_get_transport_class(): + transport = OrganizationSecurityProfileGroupServiceClient.get_transport_class() + available_transports = [ + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + transports.OrganizationSecurityProfileGroupServiceRestTransport, + ] + assert transport in available_transports + + transport = OrganizationSecurityProfileGroupServiceClient.get_transport_class( + "grpc" + ) + assert transport == transports.OrganizationSecurityProfileGroupServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + "grpc", + ), + ( + OrganizationSecurityProfileGroupServiceAsyncClient, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + OrganizationSecurityProfileGroupServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationSecurityProfileGroupServiceClient), +) +@mock.patch.object( + OrganizationSecurityProfileGroupServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template( + OrganizationSecurityProfileGroupServiceAsyncClient + ), +) +def 
test_organization_security_profile_group_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + OrganizationSecurityProfileGroupServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + OrganizationSecurityProfileGroupServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + "grpc", + "true", + ), + ( + OrganizationSecurityProfileGroupServiceAsyncClient, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + "grpc", + "false", + ), + ( + OrganizationSecurityProfileGroupServiceAsyncClient, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceRestTransport, + "rest", + "true", + ), + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + OrganizationSecurityProfileGroupServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationSecurityProfileGroupServiceClient), +) +@mock.patch.object( + OrganizationSecurityProfileGroupServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template( + OrganizationSecurityProfileGroupServiceAsyncClient + ), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_organization_security_profile_group_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [ + OrganizationSecurityProfileGroupServiceClient, + OrganizationSecurityProfileGroupServiceAsyncClient, + ], +) +@mock.patch.object( + OrganizationSecurityProfileGroupServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OrganizationSecurityProfileGroupServiceClient), +) +@mock.patch.object( + OrganizationSecurityProfileGroupServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OrganizationSecurityProfileGroupServiceAsyncClient), +) +def test_organization_security_profile_group_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [ + OrganizationSecurityProfileGroupServiceClient, + OrganizationSecurityProfileGroupServiceAsyncClient, + ], +) +@mock.patch.object( + OrganizationSecurityProfileGroupServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationSecurityProfileGroupServiceClient), +) +@mock.patch.object( + OrganizationSecurityProfileGroupServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template( + OrganizationSecurityProfileGroupServiceAsyncClient + ), +) +def test_organization_security_profile_group_service_client_client_api_endpoint( + client_class, +): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = OrganizationSecurityProfileGroupServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + OrganizationSecurityProfileGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + OrganizationSecurityProfileGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + "grpc", + ), + ( + OrganizationSecurityProfileGroupServiceAsyncClient, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceRestTransport, + "rest", + ), + ], +) +def test_organization_security_profile_group_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OrganizationSecurityProfileGroupServiceAsyncClient, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceRestTransport, + "rest", + None, + ), + ], +) +def test_organization_security_profile_group_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_organization_security_profile_group_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.transports.OrganizationSecurityProfileGroupServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = OrganizationSecurityProfileGroupServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OrganizationSecurityProfileGroupServiceAsyncClient, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_organization_security_profile_group_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.ListSecurityProfileGroupsRequest, + dict, + ], +) +def test_list_security_profile_groups(request_type, transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = client.list_security_profile_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.ListSecurityProfileGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSecurityProfileGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_security_profile_groups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_profile_group_service.ListSecurityProfileGroupsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_security_profile_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == security_profile_group_service.ListSecurityProfileGroupsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_security_profile_groups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_security_profile_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_security_profile_groups + ] = mock_rpc + request = {} + client.list_security_profile_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_security_profile_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_security_profile_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_security_profile_groups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_security_profile_groups + ] = mock_rpc + + request = {} + await client.list_security_profile_groups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_security_profile_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_security_profile_groups_async( + transport: str = "grpc_asyncio", + request_type=security_profile_group_service.ListSecurityProfileGroupsRequest, +): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group_service.ListSecurityProfileGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_security_profile_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.ListSecurityProfileGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSecurityProfileGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_security_profile_groups_async_from_dict(): + await test_list_security_profile_groups_async(request_type=dict) + + +def test_list_security_profile_groups_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.ListSecurityProfileGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + call.return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse() + ) + client.list_security_profile_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_security_profile_groups_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.ListSecurityProfileGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group_service.ListSecurityProfileGroupsResponse() + ) + await client.list_security_profile_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_security_profile_groups_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_security_profile_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_security_profile_groups_flattened_error(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_security_profile_groups( + security_profile_group_service.ListSecurityProfileGroupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_security_profile_groups_flattened_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group_service.ListSecurityProfileGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_security_profile_groups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_security_profile_groups_flattened_error_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_security_profile_groups( + security_profile_group_service.ListSecurityProfileGroupsRequest(), + parent="parent_value", + ) + + +def test_list_security_profile_groups_pager(transport_name: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + ], + next_page_token="abc", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[], + next_page_token="def", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + ], + next_page_token="ghi", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_security_profile_groups( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, security_profile_group.SecurityProfileGroup) for i in results + ) + + +def test_list_security_profile_groups_pages(transport_name: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + ], + next_page_token="abc", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[], + next_page_token="def", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + ], + next_page_token="ghi", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_security_profile_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_security_profile_groups_async_pager(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + ], + next_page_token="abc", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[], + next_page_token="def", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + ], + next_page_token="ghi", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_security_profile_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, security_profile_group.SecurityProfileGroup) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_security_profile_groups_async_pages(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + ], + next_page_token="abc", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[], + next_page_token="def", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + ], + next_page_token="ghi", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_security_profile_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.GetSecurityProfileGroupRequest, + dict, + ], +) +def test_get_security_profile_group(request_type, transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_profile_group.SecurityProfileGroup( + name="name_value", + description="description_value", + etag="etag_value", + data_path_id=1234, + threat_prevention_profile="threat_prevention_profile_value", + custom_mirroring_profile="custom_mirroring_profile_value", + custom_intercept_profile="custom_intercept_profile_value", + url_filtering_profile="url_filtering_profile_value", + ) + response = client.get_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.GetSecurityProfileGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, security_profile_group.SecurityProfileGroup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.data_path_id == 1234 + assert response.threat_prevention_profile == "threat_prevention_profile_value" + assert response.custom_mirroring_profile == "custom_mirroring_profile_value" + assert response.custom_intercept_profile == "custom_intercept_profile_value" + assert response.url_filtering_profile == "url_filtering_profile_value" + + +def test_get_security_profile_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_profile_group_service.GetSecurityProfileGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_security_profile_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_profile_group_service.GetSecurityProfileGroupRequest( + name="name_value", + ) + + +def test_get_security_profile_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_security_profile_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_security_profile_group + ] = mock_rpc + request = {} + client.get_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_security_profile_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_security_profile_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_security_profile_group + ] = mock_rpc + + request = {} + await client.get_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_security_profile_group_async( + transport: str = "grpc_asyncio", + request_type=security_profile_group_service.GetSecurityProfileGroupRequest, +): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group.SecurityProfileGroup( + name="name_value", + description="description_value", + etag="etag_value", + data_path_id=1234, + threat_prevention_profile="threat_prevention_profile_value", + custom_mirroring_profile="custom_mirroring_profile_value", + custom_intercept_profile="custom_intercept_profile_value", + url_filtering_profile="url_filtering_profile_value", + ) + ) + response = await client.get_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.GetSecurityProfileGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, security_profile_group.SecurityProfileGroup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.data_path_id == 1234 + assert response.threat_prevention_profile == "threat_prevention_profile_value" + assert response.custom_mirroring_profile == "custom_mirroring_profile_value" + assert response.custom_intercept_profile == "custom_intercept_profile_value" + assert response.url_filtering_profile == "url_filtering_profile_value" + + +@pytest.mark.asyncio +async def test_get_security_profile_group_async_from_dict(): + await test_get_security_profile_group_async(request_type=dict) + + +def test_get_security_profile_group_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.GetSecurityProfileGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile_group), "__call__" + ) as call: + call.return_value = security_profile_group.SecurityProfileGroup() + client.get_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_security_profile_group_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.GetSecurityProfileGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group.SecurityProfileGroup() + ) + await client.get_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_security_profile_group_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_profile_group.SecurityProfileGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_security_profile_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_security_profile_group_flattened_error(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_security_profile_group( + security_profile_group_service.GetSecurityProfileGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_security_profile_group_flattened_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_profile_group.SecurityProfileGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group.SecurityProfileGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_security_profile_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_security_profile_group_flattened_error_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_security_profile_group( + security_profile_group_service.GetSecurityProfileGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.CreateSecurityProfileGroupRequest, + dict, + ], +) +def test_create_security_profile_group(request_type, transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.CreateSecurityProfileGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_create_security_profile_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_profile_group_service.CreateSecurityProfileGroupRequest( + parent="parent_value", + security_profile_group_id="security_profile_group_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_security_profile_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == security_profile_group_service.CreateSecurityProfileGroupRequest( + parent="parent_value", + security_profile_group_id="security_profile_group_id_value", + ) + + +def test_create_security_profile_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_security_profile_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_security_profile_group + ] = mock_rpc + request = {} + client.create_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_security_profile_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_security_profile_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_security_profile_group + ] = mock_rpc + + request = {} + await client.create_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_security_profile_group_async( + transport: str = "grpc_asyncio", + request_type=security_profile_group_service.CreateSecurityProfileGroupRequest, +): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.CreateSecurityProfileGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_security_profile_group_async_from_dict(): + await test_create_security_profile_group_async(request_type=dict) + + +def test_create_security_profile_group_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
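+ # NOTE (illustrative, based on the google.api_core helper used elsewhere in this + # module): the expected header asserted below can be reproduced with + # gapic_v1.routing_header.to_grpc_metadata((("parent", "parent_value"),)), which + # evaluates to ("x-goog-request-params", "parent=parent_value").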
+ request = security_profile_group_service.CreateSecurityProfileGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_security_profile_group_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.CreateSecurityProfileGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_security_profile_group_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_security_profile_group( + parent="parent_value", + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + security_profile_group_id="security_profile_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].security_profile_group + mock_val = gcn_security_profile_group.SecurityProfileGroup(name="name_value") + assert arg == mock_val + arg = args[0].security_profile_group_id + mock_val = "security_profile_group_id_value" + assert arg == mock_val + + +def test_create_security_profile_group_flattened_error(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
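+ # NOTE (illustrative sketch of the generated guard, not verbatim): the client + # rejects mixing calling styles with roughly "if request is not None and + # any([parent, security_profile_group, security_profile_group_id]): raise ValueError(...)", + # so the mixed call below must fail before any RPC is attempted.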
+ with pytest.raises(ValueError): + client.create_security_profile_group( + security_profile_group_service.CreateSecurityProfileGroupRequest(), + parent="parent_value", + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + security_profile_group_id="security_profile_group_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_security_profile_group_flattened_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_security_profile_group( + parent="parent_value", + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + security_profile_group_id="security_profile_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].security_profile_group + mock_val = gcn_security_profile_group.SecurityProfileGroup(name="name_value") + assert arg == mock_val + arg = args[0].security_profile_group_id + mock_val = "security_profile_group_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_security_profile_group_flattened_error_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_security_profile_group( + security_profile_group_service.CreateSecurityProfileGroupRequest(), + parent="parent_value", + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + security_profile_group_id="security_profile_group_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.UpdateSecurityProfileGroupRequest, + dict, + ], +) +def test_update_security_profile_group(request_type, transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.UpdateSecurityProfileGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_security_profile_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_profile_group_service.UpdateSecurityProfileGroupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_security_profile_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == security_profile_group_service.UpdateSecurityProfileGroupRequest() + ) + + +def test_update_security_profile_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_security_profile_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_security_profile_group + ] = mock_rpc + request = {} + client.update_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_security_profile_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_security_profile_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_security_profile_group + ] = mock_rpc + + request = {} + await client.update_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_security_profile_group_async( + transport: str = "grpc_asyncio", + request_type=security_profile_group_service.UpdateSecurityProfileGroupRequest, +): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.UpdateSecurityProfileGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_security_profile_group_async_from_dict(): + await test_update_security_profile_group_async(request_type=dict) + + +def test_update_security_profile_group_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = security_profile_group_service.UpdateSecurityProfileGroupRequest() + + request.security_profile_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "security_profile_group.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_security_profile_group_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.UpdateSecurityProfileGroupRequest() + + request.security_profile_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "security_profile_group.name=name_value", + ) in kw["metadata"] + + +def test_update_security_profile_group_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_security_profile_group( + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].security_profile_group + mock_val = gcn_security_profile_group.SecurityProfileGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_security_profile_group_flattened_error(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_security_profile_group( + security_profile_group_service.UpdateSecurityProfileGroupRequest(), + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_security_profile_group_flattened_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_security_profile_group( + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].security_profile_group + mock_val = gcn_security_profile_group.SecurityProfileGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_security_profile_group_flattened_error_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_security_profile_group( + security_profile_group_service.UpdateSecurityProfileGroupRequest(), + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.DeleteSecurityProfileGroupRequest, + dict, + ], +) +def test_delete_security_profile_group(request_type, transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.DeleteSecurityProfileGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_security_profile_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_profile_group_service.DeleteSecurityProfileGroupRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_security_profile_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == security_profile_group_service.DeleteSecurityProfileGroupRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_security_profile_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_security_profile_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_security_profile_group + ] = mock_rpc + request = {} + client.delete_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_security_profile_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_security_profile_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_security_profile_group + ] = mock_rpc + + request = {} + await client.delete_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_security_profile_group_async( + transport: str = "grpc_asyncio", + request_type=security_profile_group_service.DeleteSecurityProfileGroupRequest, +): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.DeleteSecurityProfileGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_security_profile_group_async_from_dict(): + await test_delete_security_profile_group_async(request_type=dict) + + +def test_delete_security_profile_group_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = security_profile_group_service.DeleteSecurityProfileGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_security_profile_group_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.DeleteSecurityProfileGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_security_profile_group_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_security_profile_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_security_profile_group_flattened_error(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_security_profile_group( + security_profile_group_service.DeleteSecurityProfileGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_security_profile_group_flattened_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
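+ # NOTE (illustrative, assuming google.api_core.grpc_helpers_async behavior): the + # mocked async stub must return an awaitable, so the canned response is wrapped as + # grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(...)), which + # resolves to the wrapped message when awaited.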
+ with mock.patch.object( + type(client.transport.delete_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_security_profile_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_security_profile_group_flattened_error_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_security_profile_group( + security_profile_group_service.DeleteSecurityProfileGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.ListSecurityProfilesRequest, + dict, + ], +) +def test_list_security_profiles(request_type, transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_profile_group_service.ListSecurityProfilesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_security_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.ListSecurityProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSecurityProfilesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_security_profiles_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_profile_group_service.ListSecurityProfilesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_security_profiles(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_profile_group_service.ListSecurityProfilesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_security_profiles_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_security_profiles + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_security_profiles + ] = mock_rpc + request = {} + client.list_security_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_security_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_security_profiles_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_security_profiles + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_security_profiles + ] = mock_rpc + + request = {} + await client.list_security_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_security_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_security_profiles_async( + transport: str = "grpc_asyncio", + request_type=security_profile_group_service.ListSecurityProfilesRequest, +): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group_service.ListSecurityProfilesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_security_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.ListSecurityProfilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSecurityProfilesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_security_profiles_async_from_dict(): + await test_list_security_profiles_async(request_type=dict) + + +def test_list_security_profiles_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.ListSecurityProfilesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + call.return_value = ( + security_profile_group_service.ListSecurityProfilesResponse() + ) + client.list_security_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_security_profiles_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.ListSecurityProfilesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group_service.ListSecurityProfilesResponse() + ) + await client.list_security_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_security_profiles_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + security_profile_group_service.ListSecurityProfilesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_security_profiles( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_security_profiles_flattened_error(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_security_profiles( + security_profile_group_service.ListSecurityProfilesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_security_profiles_flattened_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + security_profile_group_service.ListSecurityProfilesResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group_service.ListSecurityProfilesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_security_profiles( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_security_profiles_flattened_error_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_security_profiles( + security_profile_group_service.ListSecurityProfilesRequest(), + parent="parent_value", + ) + + +def test_list_security_profiles_pager(transport_name: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + # Set the response to a series of pages. 
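+ # NOTE (illustrative, assuming standard pager semantics): the pager re-issues the + # RPC once per page and consumes one side_effect entry per call; iteration stops + # when next_page_token is empty, so the trailing RuntimeError only surfaces an + # unexpected extra call.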
+ call.side_effect = ( + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + ], + next_page_token="abc", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[], + next_page_token="def", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + ], + next_page_token="ghi", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_security_profiles(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, security_profile_group.SecurityProfile) for i in results + ) + + +def test_list_security_profiles_pages(transport_name: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + ], + next_page_token="abc", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[], + next_page_token="def", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + ], + next_page_token="ghi", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + ], + ), + RuntimeError, + ) + pages = list(client.list_security_profiles(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_security_profiles_async_pager(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + ], + next_page_token="abc", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[], + next_page_token="def", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + ], + next_page_token="ghi", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_security_profiles( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, security_profile_group.SecurityProfile) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_security_profiles_async_pages(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + ], + next_page_token="abc", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[], + next_page_token="def", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + ], + next_page_token="ghi", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_security_profiles(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.GetSecurityProfileRequest, + dict, + ], +) +def test_get_security_profile(request_type, transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
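+ # NOTE (illustrative): the trailing underscore in type_ is the proto-plus rename + # applied when a proto field name such as "type" would shadow a Python builtin; + # the field on the wire is presumably still "type".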
+ call.return_value = security_profile_group.SecurityProfile( + name="name_value", + description="description_value", + etag="etag_value", + type_=security_profile_group.SecurityProfile.ProfileType.THREAT_PREVENTION, + ) + response = client.get_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.GetSecurityProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, security_profile_group.SecurityProfile) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert ( + response.type_ + == security_profile_group.SecurityProfile.ProfileType.THREAT_PREVENTION + ) + + +def test_get_security_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_profile_group_service.GetSecurityProfileRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_security_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_profile_group_service.GetSecurityProfileRequest( + name="name_value", + ) + + +def test_get_security_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_security_profile in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_security_profile + ] = mock_rpc + request = {} + client.get_security_profile(request) + + # Establish that the underlying gRPC stub method was called. 
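+ # NOTE (illustrative sketch, assuming standard GAPIC transports): at construction + # _prep_wrapped_messages wraps each RPC once, roughly + # gapic_v1.method.wrap_method(rpc, default_timeout=..., client_info=...), and + # stores the result in _wrapped_methods, so later invocations reuse the cached + # wrapper; the call-count assertions below depend on that.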
+ assert mock_rpc.call_count == 1 + + client.get_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_security_profile_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_security_profile + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_security_profile + ] = mock_rpc + + request = {} + await client.get_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_security_profile_async( + transport: str = "grpc_asyncio", + request_type=security_profile_group_service.GetSecurityProfileRequest, +): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group.SecurityProfile( + name="name_value", + description="description_value", + etag="etag_value", + type_=security_profile_group.SecurityProfile.ProfileType.THREAT_PREVENTION, + ) + ) + response = await client.get_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.GetSecurityProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, security_profile_group.SecurityProfile) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert ( + response.type_ + == security_profile_group.SecurityProfile.ProfileType.THREAT_PREVENTION + ) + + +@pytest.mark.asyncio +async def test_get_security_profile_async_from_dict(): + await test_get_security_profile_async(request_type=dict) + + +def test_get_security_profile_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = security_profile_group_service.GetSecurityProfileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile), "__call__" + ) as call: + call.return_value = security_profile_group.SecurityProfile() + client.get_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_security_profile_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.GetSecurityProfileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group.SecurityProfile() + ) + await client.get_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_security_profile_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = security_profile_group.SecurityProfile() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_security_profile( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_security_profile_flattened_error(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_security_profile( + security_profile_group_service.GetSecurityProfileRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_security_profile_flattened_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = security_profile_group.SecurityProfile() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group.SecurityProfile() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_security_profile( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_security_profile_flattened_error_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_security_profile( + security_profile_group_service.GetSecurityProfileRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.CreateSecurityProfileRequest, + dict, + ], +) +def test_create_security_profile(request_type, transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.CreateSecurityProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_security_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_profile_group_service.CreateSecurityProfileRequest( + parent="parent_value", + security_profile_id="security_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_security_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_profile_group_service.CreateSecurityProfileRequest( + parent="parent_value", + security_profile_id="security_profile_id_value", + ) + + +def test_create_security_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_security_profile + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_security_profile + ] = mock_rpc + request = {} + client.create_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_security_profile_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_security_profile + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_security_profile + ] = mock_rpc + + request = {} + await client.create_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_security_profile_async( + transport: str = "grpc_asyncio", + request_type=security_profile_group_service.CreateSecurityProfileRequest, +): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.CreateSecurityProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_security_profile_async_from_dict(): + await test_create_security_profile_async(request_type=dict) + + +def test_create_security_profile_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.CreateSecurityProfileRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_security_profile_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.CreateSecurityProfileRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_security_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_security_profile_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_security_profile( + parent="parent_value", + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + security_profile_id="security_profile_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].security_profile + mock_val = security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ) + assert arg == mock_val + arg = args[0].security_profile_id + mock_val = "security_profile_id_value" + assert arg == mock_val + + +def test_create_security_profile_flattened_error(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_security_profile( + security_profile_group_service.CreateSecurityProfileRequest(), + parent="parent_value", + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + security_profile_id="security_profile_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_security_profile_flattened_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_security_profile( + parent="parent_value", + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + security_profile_id="security_profile_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].security_profile + mock_val = security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ) + assert arg == mock_val + arg = args[0].security_profile_id + mock_val = "security_profile_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_security_profile_flattened_error_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_security_profile( + security_profile_group_service.CreateSecurityProfileRequest(), + parent="parent_value", + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + security_profile_id="security_profile_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.UpdateSecurityProfileRequest, + dict, + ], +) +def test_update_security_profile(request_type, transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.UpdateSecurityProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_security_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_profile_group_service.UpdateSecurityProfileRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_security_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_profile_group_service.UpdateSecurityProfileRequest() + + +def test_update_security_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_security_profile + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_security_profile + ] = mock_rpc + request = {} + client.update_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_security_profile_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_security_profile + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_security_profile + ] = mock_rpc + + request = {} + await client.update_security_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_security_profile_async( + transport: str = "grpc_asyncio", + request_type=security_profile_group_service.UpdateSecurityProfileRequest, +): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.UpdateSecurityProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_security_profile_async_from_dict(): + await test_update_security_profile_async(request_type=dict) + + +def test_update_security_profile_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.UpdateSecurityProfileRequest() + + request.security_profile.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "security_profile.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_security_profile_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.UpdateSecurityProfileRequest() + + request.security_profile.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_security_profile), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "security_profile.name=name_value", + ) in kw["metadata"] + + +def test_update_security_profile_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_security_profile( + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].security_profile + mock_val = security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_security_profile_flattened_error(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_security_profile( + security_profile_group_service.UpdateSecurityProfileRequest(), + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_security_profile_flattened_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_security_profile( + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].security_profile + mock_val = security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_security_profile_flattened_error_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_security_profile( + security_profile_group_service.UpdateSecurityProfileRequest(), + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.DeleteSecurityProfileRequest, + dict, + ], +) +def test_delete_security_profile(request_type, transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.DeleteSecurityProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_security_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = security_profile_group_service.DeleteSecurityProfileRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_security_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == security_profile_group_service.DeleteSecurityProfileRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_security_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_security_profile + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_security_profile + ] = mock_rpc + request = {} + client.delete_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_security_profile_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_security_profile + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_security_profile + ] = mock_rpc + + request = {} + await client.delete_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_security_profile_async( + transport: str = "grpc_asyncio", + request_type=security_profile_group_service.DeleteSecurityProfileRequest, +): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = security_profile_group_service.DeleteSecurityProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_security_profile_async_from_dict(): + await test_delete_security_profile_async(request_type=dict) + + +def test_delete_security_profile_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = security_profile_group_service.DeleteSecurityProfileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_security_profile_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = security_profile_group_service.DeleteSecurityProfileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_security_profile_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_security_profile( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_security_profile_flattened_error(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_security_profile( + security_profile_group_service.DeleteSecurityProfileRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_security_profile_flattened_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_security_profile( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_security_profile_flattened_error_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_security_profile( + security_profile_group_service.DeleteSecurityProfileRequest(), + name="name_value", + ) + + +def test_list_security_profile_groups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_security_profile_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_security_profile_groups + ] = mock_rpc + + request = {} + client.list_security_profile_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_security_profile_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_security_profile_groups_rest_required_fields( + request_type=security_profile_group_service.ListSecurityProfileGroupsRequest, +): + transport_class = transports.OrganizationSecurityProfileGroupServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_security_profile_groups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_security_profile_groups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = security_profile_group_service.ListSecurityProfileGroupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_security_profile_groups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_security_profile_groups_rest_unset_required_fields(): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_security_profile_groups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_security_profile_groups_rest_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_security_profile_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=organizations/*/locations/*}/securityProfileGroups" + % client.transport._host, + args[1], + ) + + +def test_list_security_profile_groups_rest_flattened_error(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_security_profile_groups( + security_profile_group_service.ListSecurityProfileGroupsRequest(), + parent="parent_value", + ) + + +def test_list_security_profile_groups_rest_pager(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + ], + next_page_token="abc", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[], + next_page_token="def", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + ], + next_page_token="ghi", + ), + security_profile_group_service.ListSecurityProfileGroupsResponse( + security_profile_groups=[ + security_profile_group.SecurityProfileGroup(), + security_profile_group.SecurityProfileGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + security_profile_group_service.ListSecurityProfileGroupsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_security_profile_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, security_profile_group.SecurityProfileGroup) for i in results + ) + + pages = list(client.list_security_profile_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_security_profile_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_security_profile_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_security_profile_group + ] = mock_rpc + + request = {} + client.get_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_security_profile_group_rest_required_fields( + request_type=security_profile_group_service.GetSecurityProfileGroupRequest, +): + transport_class = transports.OrganizationSecurityProfileGroupServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_security_profile_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_security_profile_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = security_profile_group.SecurityProfileGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
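+ # The canned transcode_result below therefore skips real URI matching; the test only checks that the session receives the expected query parameters.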
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = security_profile_group.SecurityProfileGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_security_profile_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_security_profile_group_rest_unset_required_fields(): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_security_profile_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_security_profile_group_rest_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = security_profile_group.SecurityProfileGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/securityProfileGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = security_profile_group.SecurityProfileGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_security_profile_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=organizations/*/locations/*/securityProfileGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_security_profile_group_rest_flattened_error(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
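+ # Supplying both is ambiguous, so the client raises ValueError locally, before any HTTP request is attempted.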
+ with pytest.raises(ValueError): + client.get_security_profile_group( + security_profile_group_service.GetSecurityProfileGroupRequest(), + name="name_value", + ) + + +def test_create_security_profile_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_security_profile_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_security_profile_group + ] = mock_rpc + + request = {} + client.create_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_security_profile_group_rest_required_fields( + request_type=security_profile_group_service.CreateSecurityProfileGroupRequest, +): + transport_class = transports.OrganizationSecurityProfileGroupServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["security_profile_group_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "securityProfileGroupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_security_profile_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "securityProfileGroupId" in jsonified_request + assert ( + jsonified_request["securityProfileGroupId"] + == request_init["security_profile_group_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["securityProfileGroupId"] = "security_profile_group_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_security_profile_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
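+ # An empty set difference below means every unset required field that is reported is one of the expected query parameters.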
+ assert not set(unset_fields) - set(("security_profile_group_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "securityProfileGroupId" in jsonified_request + assert ( + jsonified_request["securityProfileGroupId"] == "security_profile_group_id_value" + ) + + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_security_profile_group(request) + + expected_params = [ + ( + "securityProfileGroupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_security_profile_group_rest_unset_required_fields(): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_security_profile_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(("securityProfileGroupId",)) + & set( + ( + "parent", + "securityProfileGroupId", + "securityProfileGroup", + ) + ) + ) + + +def test_create_security_profile_group_rest_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
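+ # Create/Update/Delete are operation methods, so the faked REST response is a google.longrunning Operation rather than the resource itself.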
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + security_profile_group_id="security_profile_group_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_security_profile_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=organizations/*/locations/*}/securityProfileGroups" + % client.transport._host, + args[1], + ) + + +def test_create_security_profile_group_rest_flattened_error(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_security_profile_group( + security_profile_group_service.CreateSecurityProfileGroupRequest(), + parent="parent_value", + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + security_profile_group_id="security_profile_group_id_value", + ) + + +def test_update_security_profile_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_security_profile_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_security_profile_group + ] = mock_rpc + + request = {} + client.update_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_security_profile_group_rest_required_fields( + request_type=security_profile_group_service.UpdateSecurityProfileGroupRequest, +): + transport_class = transports.OrganizationSecurityProfileGroupServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_security_profile_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_security_profile_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_security_profile_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_security_profile_group_rest_unset_required_fields(): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_security_profile_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "securityProfileGroup", + ) + ) + ) + + +def test_update_security_profile_group_rest_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "security_profile_group": { + "name": "organizations/sample1/locations/sample2/securityProfileGroups/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_security_profile_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{security_profile_group.name=organizations/*/locations/*/securityProfileGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_security_profile_group_rest_flattened_error(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_security_profile_group( + security_profile_group_service.UpdateSecurityProfileGroupRequest(), + security_profile_group=gcn_security_profile_group.SecurityProfileGroup( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_security_profile_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_security_profile_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_security_profile_group + ] = mock_rpc + + request = {} + client.delete_security_profile_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_security_profile_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_security_profile_group_rest_required_fields( + request_type=security_profile_group_service.DeleteSecurityProfileGroupRequest, +): + transport_class = transports.OrganizationSecurityProfileGroupServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_security_profile_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_security_profile_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
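+ # Session is requests.Session; patching its request method keeps the test entirely offline.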
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_security_profile_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_security_profile_group_rest_unset_required_fields(): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_security_profile_group._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) + + +def test_delete_security_profile_group_rest_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/securityProfileGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_security_profile_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=organizations/*/locations/*/securityProfileGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_security_profile_group_rest_flattened_error(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_security_profile_group( + security_profile_group_service.DeleteSecurityProfileGroupRequest(), + name="name_value", + ) + + +def test_list_security_profiles_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_security_profiles + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_security_profiles + ] = mock_rpc + + request = {} + client.list_security_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_security_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_security_profiles_rest_required_fields( + request_type=security_profile_group_service.ListSecurityProfilesRequest, +): + transport_class = transports.OrganizationSecurityProfileGroupServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_security_profiles._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_security_profiles._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = security_profile_group_service.ListSecurityProfilesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
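+ # transcode (from google.api_core.path_template) normally matches the request against the method's http_options; here it is stubbed with a canned result.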
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + security_profile_group_service.ListSecurityProfilesResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_security_profiles(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_security_profiles_rest_unset_required_fields(): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_security_profiles._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_security_profiles_rest_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = security_profile_group_service.ListSecurityProfilesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = security_profile_group_service.ListSecurityProfilesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_security_profiles(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=organizations/*/locations/*}/securityProfiles" + % client.transport._host, + args[1], + ) + + +def test_list_security_profiles_rest_flattened_error(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_security_profiles( + security_profile_group_service.ListSecurityProfilesRequest(), + parent="parent_value", + ) + + +def test_list_security_profiles_rest_pager(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + ], + next_page_token="abc", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[], + next_page_token="def", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + ], + next_page_token="ghi", + ), + security_profile_group_service.ListSecurityProfilesResponse( + security_profiles=[ + security_profile_group.SecurityProfile(), + security_profile_group.SecurityProfile(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + security_profile_group_service.ListSecurityProfilesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_security_profiles(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, security_profile_group.SecurityProfile) for i in results + ) + + pages = list(client.list_security_profiles(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_security_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_security_profile in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_security_profile + ] = mock_rpc + + request = {} + client.get_security_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_security_profile_rest_required_fields( + request_type=security_profile_group_service.GetSecurityProfileRequest, +): + transport_class = transports.OrganizationSecurityProfileGroupServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_security_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_security_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = security_profile_group.SecurityProfile() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = security_profile_group.SecurityProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_security_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_security_profile_rest_unset_required_fields(): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_security_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_security_profile_rest_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = security_profile_group.SecurityProfile() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/securityProfiles/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = security_profile_group.SecurityProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_security_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=organizations/*/locations/*/securityProfiles/*}" + % client.transport._host, + args[1], + ) + + +def test_get_security_profile_rest_flattened_error(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_security_profile( + security_profile_group_service.GetSecurityProfileRequest(), + name="name_value", + ) + + +def test_create_security_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_security_profile + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_security_profile + ] = mock_rpc + + request = {} + client.create_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_security_profile_rest_required_fields( + request_type=security_profile_group_service.CreateSecurityProfileRequest, +): + transport_class = transports.OrganizationSecurityProfileGroupServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["security_profile_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "securityProfileId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_security_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "securityProfileId" in jsonified_request + assert jsonified_request["securityProfileId"] == request_init["security_profile_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["securityProfileId"] = "security_profile_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_security_profile._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("security_profile_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "securityProfileId" in jsonified_request + assert jsonified_request["securityProfileId"] == "security_profile_id_value" + + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_security_profile(request) + + expected_params = [ + ( + "securityProfileId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_security_profile_rest_unset_required_fields(): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_security_profile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("securityProfileId",)) + & set( + ( + "parent", + "securityProfileId", + "securityProfile", + ) + ) + ) + + +def test_create_security_profile_rest_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + security_profile_id="security_profile_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_security_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=organizations/*/locations/*}/securityProfiles" + % client.transport._host, + args[1], + ) + + +def test_create_security_profile_rest_flattened_error(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_security_profile( + security_profile_group_service.CreateSecurityProfileRequest(), + parent="parent_value", + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + security_profile_id="security_profile_id_value", + ) + + +def test_update_security_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_security_profile + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_security_profile + ] = mock_rpc + + request = {} + client.update_security_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_security_profile_rest_required_fields( + request_type=security_profile_group_service.UpdateSecurityProfileRequest, +): + transport_class = transports.OrganizationSecurityProfileGroupServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_security_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_security_profile._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_security_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_security_profile_rest_unset_required_fields(): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_security_profile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "securityProfile", + ) + ) + ) + + +def test_update_security_profile_rest_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "security_profile": { + "name": "organizations/sample1/locations/sample2/securityProfiles/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_security_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{security_profile.name=organizations/*/locations/*/securityProfiles/*}" + % client.transport._host, + args[1], + ) + + +def test_update_security_profile_rest_flattened_error(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_security_profile( + security_profile_group_service.UpdateSecurityProfileRequest(), + security_profile=security_profile_group.SecurityProfile( + threat_prevention_profile=security_profile_group_threatprevention.ThreatPreventionProfile( + severity_overrides=[ + security_profile_group_threatprevention.SeverityOverride( + severity=security_profile_group_threatprevention.Severity.INFORMATIONAL + ) + ] + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_security_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_security_profile + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_security_profile + ] = mock_rpc + + request = {} + client.delete_security_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_security_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_security_profile_rest_required_fields( + request_type=security_profile_group_service.DeleteSecurityProfileRequest, +): + transport_class = transports.OrganizationSecurityProfileGroupServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_security_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_security_profile._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_security_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_security_profile_rest_unset_required_fields(): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_security_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) + + +def test_delete_security_profile_rest_flattened(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/securityProfiles/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_security_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=organizations/*/locations/*/securityProfiles/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_security_profile_rest_flattened_error(transport: str = "rest"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_security_profile( + security_profile_group_service.DeleteSecurityProfileRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.OrganizationSecurityProfileGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.OrganizationSecurityProfileGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OrganizationSecurityProfileGroupServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.OrganizationSecurityProfileGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OrganizationSecurityProfileGroupServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OrganizationSecurityProfileGroupServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.OrganizationSecurityProfileGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OrganizationSecurityProfileGroupServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.OrganizationSecurityProfileGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.OrganizationSecurityProfileGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + transports.OrganizationSecurityProfileGroupServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
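+    # When no credentials are passed, each transport is expected to fall
+    # back to Application Default Credentials, resolved roughly as:
+    #     credentials, project_id = google.auth.default(scopes=...)
+    # so patching google.auth.default is enough to isolate this test.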
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = OrganizationSecurityProfileGroupServiceClient.get_transport_class( + "grpc" + )(credentials=ga_credentials.AnonymousCredentials()) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_security_profile_groups_empty_call_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + call.return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse() + ) + client.list_security_profile_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.ListSecurityProfileGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_security_profile_group_empty_call_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile_group), "__call__" + ) as call: + call.return_value = security_profile_group.SecurityProfileGroup() + client.get_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.GetSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_security_profile_group_empty_call_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.CreateSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_security_profile_group_empty_call_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
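+    # Patching __call__ on the transport's multicallable captures the
+    # request message the client builds without opening a real channel.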
+ with mock.patch.object( + type(client.transport.update_security_profile_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.UpdateSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_security_profile_group_empty_call_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.DeleteSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_security_profiles_empty_call_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + call.return_value = ( + security_profile_group_service.ListSecurityProfilesResponse() + ) + client.list_security_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.ListSecurityProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_security_profile_empty_call_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile), "__call__" + ) as call: + call.return_value = security_profile_group.SecurityProfile() + client.get_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.GetSecurityProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_security_profile_empty_call_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_security_profile), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.CreateSecurityProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_security_profile_empty_call_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.UpdateSecurityProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_security_profile_empty_call_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.DeleteSecurityProfileRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = OrganizationSecurityProfileGroupServiceAsyncClient.get_transport_class( + "grpc_asyncio" + )(credentials=async_anonymous_credentials()) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_security_profile_groups_empty_call_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group_service.ListSecurityProfileGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_security_profile_groups(request=None) + + # Establish that the underlying stub method was called. 
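+        # With request=None the client should still send a well-formed
+        # (empty) ListSecurityProfileGroupsRequest to the stub.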
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.ListSecurityProfileGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_security_profile_group_empty_call_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group.SecurityProfileGroup( + name="name_value", + description="description_value", + etag="etag_value", + data_path_id=1234, + threat_prevention_profile="threat_prevention_profile_value", + custom_mirroring_profile="custom_mirroring_profile_value", + custom_intercept_profile="custom_intercept_profile_value", + url_filtering_profile="url_filtering_profile_value", + ) + ) + await client.get_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.GetSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_security_profile_group_empty_call_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.CreateSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_security_profile_group_empty_call_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_security_profile_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.UpdateSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_security_profile_group_empty_call_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.DeleteSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_security_profiles_empty_call_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group_service.ListSecurityProfilesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_security_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.ListSecurityProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_security_profile_empty_call_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + security_profile_group.SecurityProfile( + name="name_value", + description="description_value", + etag="etag_value", + type_=security_profile_group.SecurityProfile.ProfileType.THREAT_PREVENTION, + ) + ) + await client.get_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.GetSecurityProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
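+# (The asyncio variants wrap their fake responses in
+# grpc_helpers_async.FakeUnaryUnaryCall so that awaiting the mocked
+# multicallable behaves like a real unary-unary RPC.)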
+@pytest.mark.asyncio +async def test_create_security_profile_empty_call_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.CreateSecurityProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_security_profile_empty_call_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.UpdateSecurityProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_security_profile_empty_call_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.DeleteSecurityProfileRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = OrganizationSecurityProfileGroupServiceClient.get_transport_class( + "rest" + )(credentials=ga_credentials.AnonymousCredentials()) + assert transport.kind == "rest" + + +def test_list_security_profile_groups_rest_bad_request( + request_type=security_profile_group_service.ListSecurityProfileGroupsRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
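+    # A mocked 400 status with an empty JSON body is sufficient for the REST
+    # transport to raise core_exceptions.BadRequest back to the caller.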
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_security_profile_groups(request) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.ListSecurityProfileGroupsRequest, + dict, + ], +) +def test_list_security_profile_groups_rest_call_success(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = security_profile_group_service.ListSecurityProfileGroupsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_security_profile_groups(request) + + # Establish that the response is the type that we expect. 
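+    # List methods return a pager; iterating it would transparently issue
+    # follow-up requests whenever next_page_token is set, e.g.:
+    #     for group in response:
+    #         ...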
+ assert isinstance(response, pagers.ListSecurityProfileGroupsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_security_profile_groups_rest_interceptors(null_interceptor): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationSecurityProfileGroupServiceRestInterceptor(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_list_security_profile_groups", + ) as post, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_list_security_profile_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "pre_list_security_profile_groups", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = security_profile_group_service.ListSecurityProfileGroupsRequest.pb( + security_profile_group_service.ListSecurityProfileGroupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse.to_json( + security_profile_group_service.ListSecurityProfileGroupsResponse() + ) + ) + req.return_value.content = return_value + + request = security_profile_group_service.ListSecurityProfileGroupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse() + ) + post_with_metadata.return_value = ( + security_profile_group_service.ListSecurityProfileGroupsResponse(), + metadata, + ) + + client.list_security_profile_groups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_security_profile_group_rest_bad_request( + request_type=security_profile_group_service.GetSecurityProfileGroupRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/securityProfileGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_security_profile_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.GetSecurityProfileGroupRequest, + dict, + ], +) +def test_get_security_profile_group_rest_call_success(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/securityProfileGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = security_profile_group.SecurityProfileGroup( + name="name_value", + description="description_value", + etag="etag_value", + data_path_id=1234, + threat_prevention_profile="threat_prevention_profile_value", + custom_mirroring_profile="custom_mirroring_profile_value", + custom_intercept_profile="custom_intercept_profile_value", + url_filtering_profile="url_filtering_profile_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = security_profile_group.SecurityProfileGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_security_profile_group(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, security_profile_group.SecurityProfileGroup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.data_path_id == 1234 + assert response.threat_prevention_profile == "threat_prevention_profile_value" + assert response.custom_mirroring_profile == "custom_mirroring_profile_value" + assert response.custom_intercept_profile == "custom_intercept_profile_value" + assert response.url_filtering_profile == "url_filtering_profile_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_security_profile_group_rest_interceptors(null_interceptor): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationSecurityProfileGroupServiceRestInterceptor(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_get_security_profile_group", + ) as post, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_get_security_profile_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "pre_get_security_profile_group", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = security_profile_group_service.GetSecurityProfileGroupRequest.pb( + security_profile_group_service.GetSecurityProfileGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = security_profile_group.SecurityProfileGroup.to_json( + security_profile_group.SecurityProfileGroup() + ) + req.return_value.content = return_value + + request = security_profile_group_service.GetSecurityProfileGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = security_profile_group.SecurityProfileGroup() + post_with_metadata.return_value = ( + security_profile_group.SecurityProfileGroup(), + metadata, + ) + + client.get_security_profile_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_security_profile_group_rest_bad_request( + request_type=security_profile_group_service.CreateSecurityProfileGroupRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_security_profile_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.CreateSecurityProfileGroupRequest, + dict, + ], +) +def test_create_security_profile_group_rest_call_success(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init["security_profile_group"] = { + "name": "name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "data_path_id": 1234, + "labels": {}, + "threat_prevention_profile": "threat_prevention_profile_value", + "custom_mirroring_profile": "custom_mirroring_profile_value", + "custom_intercept_profile": "custom_intercept_profile_value", + "url_filtering_profile": "url_filtering_profile_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + security_profile_group_service.CreateSecurityProfileGroupRequest.meta.fields[ + "security_profile_group" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
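+        # Proto-plus message types describe their schema via `.meta.fields`,
+        # while vanilla protobuf classes expose `.DESCRIPTOR.fields`; the
+        # absence of DESCRIPTOR is what distinguishes them below.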
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_profile_group" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_profile_group"][field])): + del request_init["security_profile_group"][field][i][subfield] + else: + del request_init["security_profile_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_security_profile_group(request) + + # Establish that the response is the type that we expect. 
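+    # create_security_profile_group is a long-running method, so the
+    # generated test only re-serializes the Operation to confirm the call
+    # round-tripped successfully.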
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_security_profile_group_rest_interceptors(null_interceptor): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationSecurityProfileGroupServiceRestInterceptor(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_create_security_profile_group", + ) as post, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_create_security_profile_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "pre_create_security_profile_group", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = ( + security_profile_group_service.CreateSecurityProfileGroupRequest.pb( + security_profile_group_service.CreateSecurityProfileGroupRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = security_profile_group_service.CreateSecurityProfileGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_security_profile_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_security_profile_group_rest_bad_request( + request_type=security_profile_group_service.UpdateSecurityProfileGroupRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "security_profile_group": { + "name": "organizations/sample1/locations/sample2/securityProfileGroups/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_security_profile_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.UpdateSecurityProfileGroupRequest, + dict, + ], +) +def test_update_security_profile_group_rest_call_success(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "security_profile_group": { + "name": "organizations/sample1/locations/sample2/securityProfileGroups/sample3" + } + } + request_init["security_profile_group"] = { + "name": "organizations/sample1/locations/sample2/securityProfileGroups/sample3", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "data_path_id": 1234, + "labels": {}, + "threat_prevention_profile": "threat_prevention_profile_value", + "custom_mirroring_profile": "custom_mirroring_profile_value", + "custom_intercept_profile": "custom_intercept_profile_value", + "url_filtering_profile": "url_filtering_profile_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + security_profile_group_service.UpdateSecurityProfileGroupRequest.meta.fields[ + "security_profile_group" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "security_profile_group" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_profile_group"][field])): + del request_init["security_profile_group"][field][i][subfield] + else: + del request_init["security_profile_group"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_security_profile_group(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_security_profile_group_rest_interceptors(null_interceptor): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationSecurityProfileGroupServiceRestInterceptor(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_update_security_profile_group", + ) as post, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_update_security_profile_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "pre_update_security_profile_group", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = ( + security_profile_group_service.UpdateSecurityProfileGroupRequest.pb( + security_profile_group_service.UpdateSecurityProfileGroupRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = security_profile_group_service.UpdateSecurityProfileGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_security_profile_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_security_profile_group_rest_bad_request( + request_type=security_profile_group_service.DeleteSecurityProfileGroupRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/securityProfileGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_security_profile_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.DeleteSecurityProfileGroupRequest, + dict, + ], +) +def test_delete_security_profile_group_rest_call_success(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/securityProfileGroups/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_security_profile_group(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_security_profile_group_rest_interceptors(null_interceptor): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationSecurityProfileGroupServiceRestInterceptor(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_delete_security_profile_group", + ) as post, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_delete_security_profile_group_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "pre_delete_security_profile_group", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = ( + security_profile_group_service.DeleteSecurityProfileGroupRequest.pb( + security_profile_group_service.DeleteSecurityProfileGroupRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = 
json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = security_profile_group_service.DeleteSecurityProfileGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_security_profile_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_security_profiles_rest_bad_request( + request_type=security_profile_group_service.ListSecurityProfilesRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_security_profiles(request) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.ListSecurityProfilesRequest, + dict, + ], +) +def test_list_security_profiles_rest_call_success(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = security_profile_group_service.ListSecurityProfilesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = security_profile_group_service.ListSecurityProfilesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_security_profiles(request) + + # Establish that the response is the type that we expect. 
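+ # The client does not hand back the raw ListSecurityProfilesResponse; it wraps
+ # it in a ListSecurityProfilesPager that transparently follows next_page_token
+ # to fetch further pages. An illustrative iteration sketch (not executed by
+ # this test):
+ #
+ #     for profile in client.list_security_profiles(request):
+ #         print(profile.name)  # each item is a SecurityProfile message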
+ assert isinstance(response, pagers.ListSecurityProfilesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_security_profiles_rest_interceptors(null_interceptor): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationSecurityProfileGroupServiceRestInterceptor(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_list_security_profiles", + ) as post, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_list_security_profiles_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "pre_list_security_profiles", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = security_profile_group_service.ListSecurityProfilesRequest.pb( + security_profile_group_service.ListSecurityProfilesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = ( + security_profile_group_service.ListSecurityProfilesResponse.to_json( + security_profile_group_service.ListSecurityProfilesResponse() + ) + ) + req.return_value.content = return_value + + request = security_profile_group_service.ListSecurityProfilesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + security_profile_group_service.ListSecurityProfilesResponse() + ) + post_with_metadata.return_value = ( + security_profile_group_service.ListSecurityProfilesResponse(), + metadata, + ) + + client.list_security_profiles( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_security_profile_rest_bad_request( + request_type=security_profile_group_service.GetSecurityProfileRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/securityProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_security_profile(request) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.GetSecurityProfileRequest, + dict, + ], +) +def test_get_security_profile_rest_call_success(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/securityProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = security_profile_group.SecurityProfile( + name="name_value", + description="description_value", + etag="etag_value", + type_=security_profile_group.SecurityProfile.ProfileType.THREAT_PREVENTION, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = security_profile_group.SecurityProfile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_security_profile(request) + + # Establish that the response is the type that we expect. 
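+ # The mocked payload above was produced by serializing a SecurityProfile to
+ # JSON, so the assertions below verify the full REST deserialization
+ # round-trip: JSON body -> protobuf -> proto-plus SecurityProfile, with the
+ # scalar fields (name, description, etag, type_) preserved.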
+ assert isinstance(response, security_profile_group.SecurityProfile) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert ( + response.type_ + == security_profile_group.SecurityProfile.ProfileType.THREAT_PREVENTION + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_security_profile_rest_interceptors(null_interceptor): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationSecurityProfileGroupServiceRestInterceptor(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_get_security_profile", + ) as post, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_get_security_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "pre_get_security_profile", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = security_profile_group_service.GetSecurityProfileRequest.pb( + security_profile_group_service.GetSecurityProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = security_profile_group.SecurityProfile.to_json( + security_profile_group.SecurityProfile() + ) + req.return_value.content = return_value + + request = security_profile_group_service.GetSecurityProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = security_profile_group.SecurityProfile() + post_with_metadata.return_value = ( + security_profile_group.SecurityProfile(), + metadata, + ) + + client.get_security_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_security_profile_rest_bad_request( + request_type=security_profile_group_service.CreateSecurityProfileRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_security_profile(request) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.CreateSecurityProfileRequest, + dict, + ], +) +def test_create_security_profile_rest_call_success(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init["security_profile"] = { + "threat_prevention_profile": { + "severity_overrides": [{"severity": 1, "action": 4}], + "threat_overrides": [ + {"threat_id": "threat_id_value", "type_": 1, "action": 4} + ], + "antivirus_overrides": [{"protocol": 1, "action": 4}], + }, + "custom_mirroring_profile": { + "mirroring_endpoint_group": "mirroring_endpoint_group_value" + }, + "custom_intercept_profile": { + "intercept_endpoint_group": "intercept_endpoint_group_value" + }, + "url_filtering_profile": { + "url_filters": [ + { + "filtering_action": 1, + "urls": ["urls_value1", "urls_value2"], + "priority": 898, + } + ] + }, + "name": "name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "labels": {}, + "type_": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + security_profile_group_service.CreateSecurityProfileRequest.meta.fields[ + "security_profile" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
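+ # The check below distinguishes the two message flavours this helper may
+ # receive: proto-plus message classes expose their fields via
+ # field.message.meta.fields, while plain protobuf classes expose
+ # field.message.DESCRIPTOR.fields. Testing for the DESCRIPTOR attribute is how
+ # the helper tells them apart.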
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["security_profile"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_profile"][field])): + del request_init["security_profile"][field][i][subfield] + else: + del request_init["security_profile"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_security_profile(request) + + # Establish that the response is the type that we expect. 
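+ # CreateSecurityProfile is a long-running method: the REST layer returns the
+ # longrunning operations_pb2.Operation mocked above ("operations/spam"), and
+ # the client wraps it in an api_core operation future that callers would
+ # typically resolve with result().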
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_security_profile_rest_interceptors(null_interceptor): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationSecurityProfileGroupServiceRestInterceptor(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_create_security_profile", + ) as post, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_create_security_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "pre_create_security_profile", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = security_profile_group_service.CreateSecurityProfileRequest.pb( + security_profile_group_service.CreateSecurityProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = security_profile_group_service.CreateSecurityProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_security_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_security_profile_rest_bad_request( + request_type=security_profile_group_service.UpdateSecurityProfileRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "security_profile": { + "name": "organizations/sample1/locations/sample2/securityProfiles/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_security_profile(request) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.UpdateSecurityProfileRequest, + dict, + ], +) +def test_update_security_profile_rest_call_success(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "security_profile": { + "name": "organizations/sample1/locations/sample2/securityProfiles/sample3" + } + } + request_init["security_profile"] = { + "threat_prevention_profile": { + "severity_overrides": [{"severity": 1, "action": 4}], + "threat_overrides": [ + {"threat_id": "threat_id_value", "type_": 1, "action": 4} + ], + "antivirus_overrides": [{"protocol": 1, "action": 4}], + }, + "custom_mirroring_profile": { + "mirroring_endpoint_group": "mirroring_endpoint_group_value" + }, + "custom_intercept_profile": { + "intercept_endpoint_group": "intercept_endpoint_group_value" + }, + "url_filtering_profile": { + "url_filters": [ + { + "filtering_action": 1, + "urls": ["urls_value1", "urls_value2"], + "priority": 898, + } + ] + }, + "name": "organizations/sample1/locations/sample2/securityProfiles/sample3", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "etag": "etag_value", + "labels": {}, + "type_": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + security_profile_group_service.UpdateSecurityProfileRequest.meta.fields[ + "security_profile" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["security_profile"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["security_profile"][field])): + del request_init["security_profile"][field][i][subfield] + else: + del request_init["security_profile"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_security_profile(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_security_profile_rest_interceptors(null_interceptor): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationSecurityProfileGroupServiceRestInterceptor(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_update_security_profile", + ) as post, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_update_security_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "pre_update_security_profile", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = security_profile_group_service.UpdateSecurityProfileRequest.pb( + security_profile_group_service.UpdateSecurityProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = security_profile_group_service.UpdateSecurityProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_security_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_security_profile_rest_bad_request( + request_type=security_profile_group_service.DeleteSecurityProfileRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/securityProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_security_profile(request) + + +@pytest.mark.parametrize( + "request_type", + [ + security_profile_group_service.DeleteSecurityProfileRequest, + dict, + ], +) +def test_delete_security_profile_rest_call_success(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/securityProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_security_profile(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_security_profile_rest_interceptors(null_interceptor): + transport = transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationSecurityProfileGroupServiceRestInterceptor(), + ) + client = OrganizationSecurityProfileGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_delete_security_profile", + ) as post, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "post_delete_security_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceRestInterceptor, + "pre_delete_security_profile", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = security_profile_group_service.DeleteSecurityProfileRequest.pb( + security_profile_group_service.DeleteSecurityProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = 
return_value + + request = security_profile_group_service.DeleteSecurityProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_security_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
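+ # The IAM mixin methods operate on raw protobuf types rather than proto-plus
+ # wrappers, so the fake response below is a bare iam.v1 policy_pb2.Policy
+ # serialized with json_format, and the client is expected to return the same
+ # protobuf type.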
+ return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
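+ # CancelOperation (like DeleteOperation below) has an empty response body, so
+ # the mocked payload is just "{}" and the client surfaces the result as None
+ # rather than a message object.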
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_security_profile_groups_empty_call_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profile_groups), "__call__" + ) as call: + client.list_security_profile_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.ListSecurityProfileGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_security_profile_group_empty_call_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile_group), "__call__" + ) as call: + client.get_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.GetSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_security_profile_group_empty_call_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile_group), "__call__" + ) as call: + client.create_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.CreateSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_security_profile_group_empty_call_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
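+ # Patching the bound transport method lets the test confirm that calling the
+ # client with request=None still produces a well-formed, default-initialized
+ # UpdateSecurityProfileGroupRequest before the transport is invoked.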
+ with mock.patch.object( + type(client.transport.update_security_profile_group), "__call__" + ) as call: + client.update_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.UpdateSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_security_profile_group_empty_call_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile_group), "__call__" + ) as call: + client.delete_security_profile_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.DeleteSecurityProfileGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_security_profiles_empty_call_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_security_profiles), "__call__" + ) as call: + client.list_security_profiles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.ListSecurityProfilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_security_profile_empty_call_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_security_profile), "__call__" + ) as call: + client.get_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.GetSecurityProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_security_profile_empty_call_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_security_profile), "__call__" + ) as call: + client.create_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.CreateSecurityProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_security_profile_empty_call_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_security_profile), "__call__" + ) as call: + client.update_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.UpdateSecurityProfileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_security_profile_empty_call_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_security_profile), "__call__" + ) as call: + client.delete_security_profile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = security_profile_group_service.DeleteSecurityProfileRequest() + + assert args[0] == request_msg + + +def test_organization_security_profile_group_service_rest_lro_client(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + ) + + +def test_organization_security_profile_group_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.OrganizationSecurityProfileGroupServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_organization_security_profile_group_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.transports.OrganizationSecurityProfileGroupServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.OrganizationSecurityProfileGroupServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
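+ # The abstract base transport only declares the RPC surface; each method stub
+ # raises NotImplementedError until a concrete transport (gRPC, gRPC-asyncio or
+ # REST) overrides it, which is what the loop below verifies name by name.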
+ methods = ( + "list_security_profile_groups", + "get_security_profile_group", + "create_security_profile_group", + "update_security_profile_group", + "delete_security_profile_group", + "list_security_profiles", + "get_security_profile", + "create_security_profile", + "update_security_profile", + "delete_security_profile", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_organization_security_profile_group_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.transports.OrganizationSecurityProfileGroupServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OrganizationSecurityProfileGroupServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_organization_security_profile_group_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.network_security_v1alpha1.services.organization_security_profile_group_service.transports.OrganizationSecurityProfileGroupServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OrganizationSecurityProfileGroupServiceTransport() + adc.assert_called_once() + + +def test_organization_security_profile_group_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + OrganizationSecurityProfileGroupServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + ], +) +def test_organization_security_profile_group_service_transport_auth_adc( + transport_class, +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
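+    # The assertion below also checks that user-supplied scopes and the
+    # quota_project_id are forwarded to google.auth.default, with the
+    # cloud-platform scope passed as the default scope.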
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + transports.OrganizationSecurityProfileGroupServiceRestTransport, + ], +) +def test_organization_security_profile_group_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.OrganizationSecurityProfileGroupServiceGrpcTransport, grpc_helpers), + ( + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_organization_security_profile_group_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + ], +) +def test_organization_security_profile_group_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
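+    # create_channel is patched so no real gRPC channel is opened; the recorded
+    # call arguments are inspected instead.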
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_organization_security_profile_group_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.OrganizationSecurityProfileGroupServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_organization_security_profile_group_service_host_no_port(transport_name): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_organization_security_profile_group_service_host_with_port(transport_name): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_organization_security_profile_group_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = OrganizationSecurityProfileGroupServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = OrganizationSecurityProfileGroupServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_security_profile_groups._session + session2 = client2.transport.list_security_profile_groups._session + assert session1 != session2 + session1 = client1.transport.get_security_profile_group._session + 
+    session2 = client2.transport.get_security_profile_group._session
+    assert session1 != session2
+    session1 = client1.transport.create_security_profile_group._session
+    session2 = client2.transport.create_security_profile_group._session
+    assert session1 != session2
+    session1 = client1.transport.update_security_profile_group._session
+    session2 = client2.transport.update_security_profile_group._session
+    assert session1 != session2
+    session1 = client1.transport.delete_security_profile_group._session
+    session2 = client2.transport.delete_security_profile_group._session
+    assert session1 != session2
+    session1 = client1.transport.list_security_profiles._session
+    session2 = client2.transport.list_security_profiles._session
+    assert session1 != session2
+    session1 = client1.transport.get_security_profile._session
+    session2 = client2.transport.get_security_profile._session
+    assert session1 != session2
+    session1 = client1.transport.create_security_profile._session
+    session2 = client2.transport.create_security_profile._session
+    assert session1 != session2
+    session1 = client1.transport.update_security_profile._session
+    session2 = client2.transport.update_security_profile._session
+    assert session1 != session2
+    session1 = client1.transport.delete_security_profile._session
+    session2 = client2.transport.delete_security_profile._session
+    assert session1 != session2
+
+
+def test_organization_security_profile_group_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.OrganizationSecurityProfileGroupServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_organization_security_profile_group_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + ], +) +def test_organization_security_profile_group_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + ], +) +def test_organization_security_profile_group_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_organization_security_profile_group_service_grpc_lro_client(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_organization_security_profile_group_service_grpc_lro_async_client(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_intercept_endpoint_group_path(): + project = "squid" + location = "clam" + intercept_endpoint_group = "whelk" + expected = "projects/{project}/locations/{location}/interceptEndpointGroups/{intercept_endpoint_group}".format( + project=project, + location=location, + intercept_endpoint_group=intercept_endpoint_group, + ) + actual = ( + OrganizationSecurityProfileGroupServiceClient.intercept_endpoint_group_path( + project, location, intercept_endpoint_group + ) + ) + assert expected == actual + + +def test_parse_intercept_endpoint_group_path(): + expected = { + "project": "octopus", + "location": "oyster", + "intercept_endpoint_group": "nudibranch", + } + path = OrganizationSecurityProfileGroupServiceClient.intercept_endpoint_group_path( + **expected + ) + + # Check that the path construction is reversible. + actual = OrganizationSecurityProfileGroupServiceClient.parse_intercept_endpoint_group_path( + path + ) + assert expected == actual + + +def test_mirroring_endpoint_group_path(): + project = "cuttlefish" + location = "mussel" + mirroring_endpoint_group = "winkle" + expected = "projects/{project}/locations/{location}/mirroringEndpointGroups/{mirroring_endpoint_group}".format( + project=project, + location=location, + mirroring_endpoint_group=mirroring_endpoint_group, + ) + actual = ( + OrganizationSecurityProfileGroupServiceClient.mirroring_endpoint_group_path( + project, location, mirroring_endpoint_group + ) + ) + assert expected == actual + + +def test_parse_mirroring_endpoint_group_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "mirroring_endpoint_group": "abalone", + } + path = OrganizationSecurityProfileGroupServiceClient.mirroring_endpoint_group_path( + **expected + ) + + # Check that the path construction is reversible. + actual = OrganizationSecurityProfileGroupServiceClient.parse_mirroring_endpoint_group_path( + path + ) + assert expected == actual + + +def test_security_profile_path(): + organization = "squid" + location = "clam" + security_profile = "whelk" + expected = "organizations/{organization}/locations/{location}/securityProfiles/{security_profile}".format( + organization=organization, + location=location, + security_profile=security_profile, + ) + actual = OrganizationSecurityProfileGroupServiceClient.security_profile_path( + organization, location, security_profile + ) + assert expected == actual + + +def test_parse_security_profile_path(): + expected = { + "organization": "octopus", + "location": "oyster", + "security_profile": "nudibranch", + } + path = OrganizationSecurityProfileGroupServiceClient.security_profile_path( + **expected + ) + + # Check that the path construction is reversible. 
+ actual = OrganizationSecurityProfileGroupServiceClient.parse_security_profile_path( + path + ) + assert expected == actual + + +def test_security_profile_group_path(): + organization = "cuttlefish" + location = "mussel" + security_profile_group = "winkle" + expected = "organizations/{organization}/locations/{location}/securityProfileGroups/{security_profile_group}".format( + organization=organization, + location=location, + security_profile_group=security_profile_group, + ) + actual = OrganizationSecurityProfileGroupServiceClient.security_profile_group_path( + organization, location, security_profile_group + ) + assert expected == actual + + +def test_parse_security_profile_group_path(): + expected = { + "organization": "nautilus", + "location": "scallop", + "security_profile_group": "abalone", + } + path = OrganizationSecurityProfileGroupServiceClient.security_profile_group_path( + **expected + ) + + # Check that the path construction is reversible. + actual = ( + OrganizationSecurityProfileGroupServiceClient.parse_security_profile_group_path( + path + ) + ) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = OrganizationSecurityProfileGroupServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = OrganizationSecurityProfileGroupServiceClient.common_billing_account_path( + **expected + ) + + # Check that the path construction is reversible. + actual = ( + OrganizationSecurityProfileGroupServiceClient.parse_common_billing_account_path( + path + ) + ) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = OrganizationSecurityProfileGroupServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = OrganizationSecurityProfileGroupServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = OrganizationSecurityProfileGroupServiceClient.parse_common_folder_path( + path + ) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = OrganizationSecurityProfileGroupServiceClient.common_organization_path( + organization + ) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = OrganizationSecurityProfileGroupServiceClient.common_organization_path( + **expected + ) + + # Check that the path construction is reversible. + actual = ( + OrganizationSecurityProfileGroupServiceClient.parse_common_organization_path( + path + ) + ) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = OrganizationSecurityProfileGroupServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = OrganizationSecurityProfileGroupServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OrganizationSecurityProfileGroupServiceClient.parse_common_project_path( + path + ) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = OrganizationSecurityProfileGroupServiceClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = OrganizationSecurityProfileGroupServiceClient.common_location_path( + **expected + ) + + # Check that the path construction is reversible. + actual = OrganizationSecurityProfileGroupServiceClient.parse_common_location_path( + path + ) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceTransport, + "_prep_wrapped_messages", + ) as prep: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.OrganizationSecurityProfileGroupServiceTransport, + "_prep_wrapped_messages", + ) as prep: + transport_class = ( + OrganizationSecurityProfileGroupServiceClient.get_transport_class() + ) + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_operation_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
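+        # Note: __call__ is patched on the multicallable type obtained from
+        # list_locations; get_location shares that type, so the call below is
+        # intercepted all the same.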
+ call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = OrganizationSecurityProfileGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = OrganizationSecurityProfileGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + OrganizationSecurityProfileGroupServiceClient, + transports.OrganizationSecurityProfileGroupServiceGrpcTransport, + ), + ( + OrganizationSecurityProfileGroupServiceAsyncClient, + transports.OrganizationSecurityProfileGroupServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_sse_gateway_service.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_sse_gateway_service.py new file mode 100644 index 000000000000..342b7d8b274e --- /dev/null +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_sse_gateway_service.py @@ -0,0 +1,10189 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.sse_gateway_service import ( + SSEGatewayServiceAsyncClient, + SSEGatewayServiceClient, + pagers, + transports, +) +from google.cloud.network_security_v1alpha1.types import common, sse_gateway + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SSEGatewayServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SSEGatewayServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SSEGatewayServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SSEGatewayServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SSEGatewayServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SSEGatewayServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert SSEGatewayServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SSEGatewayServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SSEGatewayServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + SSEGatewayServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SSEGatewayServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SSEGatewayServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SSEGatewayServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SSEGatewayServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SSEGatewayServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert 
SSEGatewayServiceClient._get_client_cert_source(None, False) is None + assert ( + SSEGatewayServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + SSEGatewayServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + SSEGatewayServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + SSEGatewayServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + SSEGatewayServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSEGatewayServiceClient), +) +@mock.patch.object( + SSEGatewayServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSEGatewayServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SSEGatewayServiceClient._DEFAULT_UNIVERSE + default_endpoint = SSEGatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SSEGatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + SSEGatewayServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + SSEGatewayServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == SSEGatewayServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SSEGatewayServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + SSEGatewayServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == SSEGatewayServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SSEGatewayServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SSEGatewayServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SSEGatewayServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + SSEGatewayServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SSEGatewayServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SSEGatewayServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SSEGatewayServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SSEGatewayServiceClient._get_universe_domain(None, None) + == SSEGatewayServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SSEGatewayServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
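+
+
+# A minimal illustrative sketch (hypothetical helper, not part of the generated client):
+# it mirrors the resolution order exercised by test__get_universe_domain above, namely an
+# explicit client option first, then the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment value,
+# then the "googleapis.com" default, with the same empty-string validation.
+def _example_resolve_universe_domain(client_value=None, env_value=None):
+    # Reject an explicitly empty client-provided universe domain, matching the error
+    # message asserted in the test above.
+    if client_value == "":
+        raise ValueError("Universe Domain cannot be an empty string.")
+    # Fall back from the client option to the environment value to the default universe.
+    return client_value or env_value or "googleapis.com"
+
+
+def test__example_resolve_universe_domain():
+    # Usage of the hypothetical sketch above, kept separate from the generated assertions.
+    assert _example_resolve_universe_domain("foo.com", "bar.com") == "foo.com"
+    assert _example_resolve_universe_domain(None, "bar.com") == "bar.com"
+    assert _example_resolve_universe_domain() == "googleapis.com"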
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SSEGatewayServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SSEGatewayServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SSEGatewayServiceClient, "grpc"), + (SSEGatewayServiceAsyncClient, "grpc_asyncio"), + (SSEGatewayServiceClient, "rest"), + ], +) +def test_sse_gateway_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SSEGatewayServiceGrpcTransport, "grpc"), + (transports.SSEGatewayServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SSEGatewayServiceRestTransport, "rest"), + ], +) +def test_sse_gateway_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SSEGatewayServiceClient, "grpc"), + (SSEGatewayServiceAsyncClient, "grpc_asyncio"), + (SSEGatewayServiceClient, "rest"), + ], +) +def test_sse_gateway_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: 
+ factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +def test_sse_gateway_service_client_get_transport_class(): + transport = SSEGatewayServiceClient.get_transport_class() + available_transports = [ + transports.SSEGatewayServiceGrpcTransport, + transports.SSEGatewayServiceRestTransport, + ] + assert transport in available_transports + + transport = SSEGatewayServiceClient.get_transport_class("grpc") + assert transport == transports.SSEGatewayServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SSEGatewayServiceClient, transports.SSEGatewayServiceGrpcTransport, "grpc"), + ( + SSEGatewayServiceAsyncClient, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SSEGatewayServiceClient, transports.SSEGatewayServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + SSEGatewayServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSEGatewayServiceClient), +) +@mock.patch.object( + SSEGatewayServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSEGatewayServiceAsyncClient), +) +def test_sse_gateway_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SSEGatewayServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SSEGatewayServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + SSEGatewayServiceClient, + 
transports.SSEGatewayServiceGrpcTransport, + "grpc", + "true", + ), + ( + SSEGatewayServiceAsyncClient, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + SSEGatewayServiceClient, + transports.SSEGatewayServiceGrpcTransport, + "grpc", + "false", + ), + ( + SSEGatewayServiceAsyncClient, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + SSEGatewayServiceClient, + transports.SSEGatewayServiceRestTransport, + "rest", + "true", + ), + ( + SSEGatewayServiceClient, + transports.SSEGatewayServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + SSEGatewayServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSEGatewayServiceClient), +) +@mock.patch.object( + SSEGatewayServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSEGatewayServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_sse_gateway_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SSEGatewayServiceClient, SSEGatewayServiceAsyncClient] +) +@mock.patch.object( + SSEGatewayServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SSEGatewayServiceClient), +) +@mock.patch.object( + SSEGatewayServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SSEGatewayServiceAsyncClient), +) +def test_sse_gateway_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [SSEGatewayServiceClient, SSEGatewayServiceAsyncClient] +) +@mock.patch.object( + SSEGatewayServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSEGatewayServiceClient), +) +@mock.patch.object( + SSEGatewayServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSEGatewayServiceAsyncClient), +) +def test_sse_gateway_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SSEGatewayServiceClient._DEFAULT_UNIVERSE + default_endpoint = SSEGatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SSEGatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SSEGatewayServiceClient, transports.SSEGatewayServiceGrpcTransport, "grpc"), + ( + SSEGatewayServiceAsyncClient, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SSEGatewayServiceClient, transports.SSEGatewayServiceRestTransport, "rest"), + ], +) +def test_sse_gateway_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SSEGatewayServiceClient, + transports.SSEGatewayServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SSEGatewayServiceAsyncClient, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + SSEGatewayServiceClient, + transports.SSEGatewayServiceRestTransport, + "rest", + None, + ), + ], +) +def test_sse_gateway_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_sse_gateway_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.network_security_v1alpha1.services.sse_gateway_service.transports.SSEGatewayServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SSEGatewayServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SSEGatewayServiceClient, + transports.SSEGatewayServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SSEGatewayServiceAsyncClient, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_sse_gateway_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.ListPartnerSSEGatewaysRequest, + dict, + ], +) +def test_list_partner_sse_gateways(request_type, transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_gateway.ListPartnerSSEGatewaysResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_partner_sse_gateways(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_gateway.ListPartnerSSEGatewaysRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPartnerSSEGatewaysPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_partner_sse_gateways_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = sse_gateway.ListPartnerSSEGatewaysRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_partner_sse_gateways(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_gateway.ListPartnerSSEGatewaysRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_partner_sse_gateways_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_partner_sse_gateways + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_partner_sse_gateways + ] = mock_rpc + request = {} + client.list_partner_sse_gateways(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_partner_sse_gateways(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_partner_sse_gateways_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_partner_sse_gateways + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_partner_sse_gateways + ] = mock_rpc + + request = {} + await client.list_partner_sse_gateways(request) + + # Establish that the underlying gRPC stub method was called. 
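+ # mock_rpc replaced the cached wrapped coroutine above, so its call_count
+ # reflects client-level invocations directly.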
+ assert mock_rpc.call_count == 1 + + await client.list_partner_sse_gateways(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_partner_sse_gateways_async( + transport: str = "grpc_asyncio", + request_type=sse_gateway.ListPartnerSSEGatewaysRequest, +): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.ListPartnerSSEGatewaysResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_partner_sse_gateways(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_gateway.ListPartnerSSEGatewaysRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPartnerSSEGatewaysAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_partner_sse_gateways_async_from_dict(): + await test_list_partner_sse_gateways_async(request_type=dict) + + +def test_list_partner_sse_gateways_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.ListPartnerSSEGatewaysRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + call.return_value = sse_gateway.ListPartnerSSEGatewaysResponse() + client.list_partner_sse_gateways(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_partner_sse_gateways_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.ListPartnerSSEGatewaysRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.ListPartnerSSEGatewaysResponse() + ) + await client.list_partner_sse_gateways(request) + + # Establish that the underlying gRPC stub method was called. 
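+ # As in the synchronous variant, both the echoed request object and the
+ # routing metadata are verified below.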
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_partner_sse_gateways_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_gateway.ListPartnerSSEGatewaysResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_partner_sse_gateways( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_partner_sse_gateways_flattened_error(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_partner_sse_gateways( + sse_gateway.ListPartnerSSEGatewaysRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_partner_sse_gateways_flattened_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_gateway.ListPartnerSSEGatewaysResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.ListPartnerSSEGatewaysResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_partner_sse_gateways( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_partner_sse_gateways_flattened_error_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_partner_sse_gateways( + sse_gateway.ListPartnerSSEGatewaysRequest(), + parent="parent_value", + ) + + +def test_list_partner_sse_gateways_pager(transport_name: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + # Set the response to a series of pages. 
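+ # Four pages (six resources in total) are returned in sequence; the trailing
+ # RuntimeError would only surface if the pager requested more pages than the
+ # mock supplies.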
+ call.side_effect = ( + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + ], + next_page_token="abc", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[], + next_page_token="def", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + ], + next_page_token="ghi", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_partner_sse_gateways( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, sse_gateway.PartnerSSEGateway) for i in results) + + +def test_list_partner_sse_gateways_pages(transport_name: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + ], + next_page_token="abc", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[], + next_page_token="def", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + ], + next_page_token="ghi", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + ], + ), + RuntimeError, + ) + pages = list(client.list_partner_sse_gateways(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_partner_sse_gateways_async_pager(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
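+ # Same page sequence as the synchronous pager test; the final response
+ # carries no next_page_token, which is what ends iteration.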
+ call.side_effect = ( + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + ], + next_page_token="abc", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[], + next_page_token="def", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + ], + next_page_token="ghi", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_partner_sse_gateways( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, sse_gateway.PartnerSSEGateway) for i in responses) + + +@pytest.mark.asyncio +async def test_list_partner_sse_gateways_async_pages(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + ], + next_page_token="abc", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[], + next_page_token="def", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + ], + next_page_token="ghi", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_partner_sse_gateways(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.GetPartnerSSEGatewayRequest, + dict, + ], +) +def test_get_partner_sse_gateway(request_type, transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
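+ # Every scalar field gets a distinct sentinel value so the assertions below
+ # can confirm the message is surfaced to the caller unchanged.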
+ call.return_value = sse_gateway.PartnerSSEGateway( + name="name_value", + sse_vpc_subnet_range="sse_vpc_subnet_range_value", + sse_vpc_target_ip="sse_vpc_target_ip_value", + sse_gateway_reference_id="sse_gateway_reference_id_value", + sse_bgp_ips=["sse_bgp_ips_value"], + sse_bgp_asn=1156, + partner_vpc_subnet_range="partner_vpc_subnet_range_value", + partner_sse_realm="partner_sse_realm_value", + sse_subnet_range="sse_subnet_range_value", + sse_target_ip="sse_target_ip_value", + partner_subnet_range="partner_subnet_range_value", + vni=333, + sse_project="sse_project_value", + sse_network="sse_network_value", + partner_sse_environment="partner_sse_environment_value", + country="country_value", + timezone="timezone_value", + capacity_bps=1266, + state=sse_gateway.PartnerSSEGateway.State.CUSTOMER_ATTACHED, + prober_subnet_ranges=["prober_subnet_ranges_value"], + ) + response = client.get_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_gateway.GetPartnerSSEGatewayRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, sse_gateway.PartnerSSEGateway) + assert response.name == "name_value" + assert response.sse_vpc_subnet_range == "sse_vpc_subnet_range_value" + assert response.sse_vpc_target_ip == "sse_vpc_target_ip_value" + assert response.sse_gateway_reference_id == "sse_gateway_reference_id_value" + assert response.sse_bgp_ips == ["sse_bgp_ips_value"] + assert response.sse_bgp_asn == 1156 + assert response.partner_vpc_subnet_range == "partner_vpc_subnet_range_value" + assert response.partner_sse_realm == "partner_sse_realm_value" + assert response.sse_subnet_range == "sse_subnet_range_value" + assert response.sse_target_ip == "sse_target_ip_value" + assert response.partner_subnet_range == "partner_subnet_range_value" + assert response.vni == 333 + assert response.sse_project == "sse_project_value" + assert response.sse_network == "sse_network_value" + assert response.partner_sse_environment == "partner_sse_environment_value" + assert response.country == "country_value" + assert response.timezone == "timezone_value" + assert response.capacity_bps == 1266 + assert response.state == sse_gateway.PartnerSSEGateway.State.CUSTOMER_ATTACHED + assert response.prober_subnet_ranges == ["prober_subnet_ranges_value"] + + +def test_get_partner_sse_gateway_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_gateway.GetPartnerSSEGatewayRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_gateway), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_partner_sse_gateway(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_gateway.GetPartnerSSEGatewayRequest( + name="name_value", + ) + + +def test_get_partner_sse_gateway_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_partner_sse_gateway + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_partner_sse_gateway + ] = mock_rpc + request = {} + client.get_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_partner_sse_gateway_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_partner_sse_gateway + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_partner_sse_gateway + ] = mock_rpc + + request = {} + await client.get_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_partner_sse_gateway_async( + transport: str = "grpc_asyncio", + request_type=sse_gateway.GetPartnerSSEGatewayRequest, +): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
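+ # The async stub is primed with a fake unary call wrapping the populated
+ # message, so awaiting the RPC yields the same sentinel values asserted
+ # further down.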
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.PartnerSSEGateway( + name="name_value", + sse_vpc_subnet_range="sse_vpc_subnet_range_value", + sse_vpc_target_ip="sse_vpc_target_ip_value", + sse_gateway_reference_id="sse_gateway_reference_id_value", + sse_bgp_ips=["sse_bgp_ips_value"], + sse_bgp_asn=1156, + partner_vpc_subnet_range="partner_vpc_subnet_range_value", + partner_sse_realm="partner_sse_realm_value", + sse_subnet_range="sse_subnet_range_value", + sse_target_ip="sse_target_ip_value", + partner_subnet_range="partner_subnet_range_value", + vni=333, + sse_project="sse_project_value", + sse_network="sse_network_value", + partner_sse_environment="partner_sse_environment_value", + country="country_value", + timezone="timezone_value", + capacity_bps=1266, + state=sse_gateway.PartnerSSEGateway.State.CUSTOMER_ATTACHED, + prober_subnet_ranges=["prober_subnet_ranges_value"], + ) + ) + response = await client.get_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_gateway.GetPartnerSSEGatewayRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, sse_gateway.PartnerSSEGateway) + assert response.name == "name_value" + assert response.sse_vpc_subnet_range == "sse_vpc_subnet_range_value" + assert response.sse_vpc_target_ip == "sse_vpc_target_ip_value" + assert response.sse_gateway_reference_id == "sse_gateway_reference_id_value" + assert response.sse_bgp_ips == ["sse_bgp_ips_value"] + assert response.sse_bgp_asn == 1156 + assert response.partner_vpc_subnet_range == "partner_vpc_subnet_range_value" + assert response.partner_sse_realm == "partner_sse_realm_value" + assert response.sse_subnet_range == "sse_subnet_range_value" + assert response.sse_target_ip == "sse_target_ip_value" + assert response.partner_subnet_range == "partner_subnet_range_value" + assert response.vni == 333 + assert response.sse_project == "sse_project_value" + assert response.sse_network == "sse_network_value" + assert response.partner_sse_environment == "partner_sse_environment_value" + assert response.country == "country_value" + assert response.timezone == "timezone_value" + assert response.capacity_bps == 1266 + assert response.state == sse_gateway.PartnerSSEGateway.State.CUSTOMER_ATTACHED + assert response.prober_subnet_ranges == ["prober_subnet_ranges_value"] + + +@pytest.mark.asyncio +async def test_get_partner_sse_gateway_async_from_dict(): + await test_get_partner_sse_gateway_async(request_type=dict) + + +def test_get_partner_sse_gateway_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.GetPartnerSSEGatewayRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_gateway), "__call__" + ) as call: + call.return_value = sse_gateway.PartnerSSEGateway() + client.get_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
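+ # The routing header travels in the call's metadata; the exact key/value
+ # pair is asserted below.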
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_partner_sse_gateway_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.GetPartnerSSEGatewayRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_gateway), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.PartnerSSEGateway() + ) + await client.get_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_partner_sse_gateway_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_gateway.PartnerSSEGateway() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_partner_sse_gateway( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_partner_sse_gateway_flattened_error(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_partner_sse_gateway( + sse_gateway.GetPartnerSSEGatewayRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_partner_sse_gateway_flattened_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_gateway.PartnerSSEGateway() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.PartnerSSEGateway() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_partner_sse_gateway( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
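+ # The flattened keyword argument is merged into the request message, so the
+ # request captured by the mock should carry name="name_value".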
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_partner_sse_gateway_flattened_error_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_partner_sse_gateway( + sse_gateway.GetPartnerSSEGatewayRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.CreatePartnerSSEGatewayRequest, + dict, + ], +) +def test_create_partner_sse_gateway(request_type, transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_gateway.CreatePartnerSSEGatewayRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_partner_sse_gateway_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_gateway.CreatePartnerSSEGatewayRequest( + parent="parent_value", + partner_sse_gateway_id="partner_sse_gateway_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_gateway), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_partner_sse_gateway(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_gateway.CreatePartnerSSEGatewayRequest( + parent="parent_value", + partner_sse_gateway_id="partner_sse_gateway_id_value", + request_id="request_id_value", + ) + + +def test_create_partner_sse_gateway_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_partner_sse_gateway + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_partner_sse_gateway + ] = mock_rpc + request = {} + client.create_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_partner_sse_gateway_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_partner_sse_gateway + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_partner_sse_gateway + ] = mock_rpc + + request = {} + await client.create_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_partner_sse_gateway_async( + transport: str = "grpc_asyncio", + request_type=sse_gateway.CreatePartnerSSEGatewayRequest, +): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_gateway.CreatePartnerSSEGatewayRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_partner_sse_gateway_async_from_dict(): + await test_create_partner_sse_gateway_async(request_type=dict) + + +def test_create_partner_sse_gateway_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.CreatePartnerSSEGatewayRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_gateway), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_partner_sse_gateway_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.CreatePartnerSSEGatewayRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_gateway), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_partner_sse_gateway_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_partner_sse_gateway( + parent="parent_value", + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + partner_sse_gateway_id="partner_sse_gateway_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].partner_sse_gateway + mock_val = sse_gateway.PartnerSSEGateway(name="name_value") + assert arg == mock_val + arg = args[0].partner_sse_gateway_id + mock_val = "partner_sse_gateway_id_value" + assert arg == mock_val + + +def test_create_partner_sse_gateway_flattened_error(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_partner_sse_gateway( + sse_gateway.CreatePartnerSSEGatewayRequest(), + parent="parent_value", + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + partner_sse_gateway_id="partner_sse_gateway_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_partner_sse_gateway_flattened_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_partner_sse_gateway( + parent="parent_value", + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + partner_sse_gateway_id="partner_sse_gateway_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].partner_sse_gateway + mock_val = sse_gateway.PartnerSSEGateway(name="name_value") + assert arg == mock_val + arg = args[0].partner_sse_gateway_id + mock_val = "partner_sse_gateway_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_partner_sse_gateway_flattened_error_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
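+ # Supplying both a request object and flattened fields is ambiguous, so the
+ # client raises ValueError before any RPC is attempted.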
+ with pytest.raises(ValueError): + await client.create_partner_sse_gateway( + sse_gateway.CreatePartnerSSEGatewayRequest(), + parent="parent_value", + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + partner_sse_gateway_id="partner_sse_gateway_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.DeletePartnerSSEGatewayRequest, + dict, + ], +) +def test_delete_partner_sse_gateway(request_type, transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_gateway.DeletePartnerSSEGatewayRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_partner_sse_gateway_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_gateway.DeletePartnerSSEGatewayRequest( + name="name_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_gateway), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_partner_sse_gateway(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_gateway.DeletePartnerSSEGatewayRequest( + name="name_value", + request_id="request_id_value", + ) + + +def test_delete_partner_sse_gateway_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_partner_sse_gateway + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_partner_sse_gateway + ] = mock_rpc + request = {} + client.delete_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_partner_sse_gateway_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_partner_sse_gateway + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_partner_sse_gateway + ] = mock_rpc + + request = {} + await client.delete_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_partner_sse_gateway_async( + transport: str = "grpc_asyncio", + request_type=sse_gateway.DeletePartnerSSEGatewayRequest, +): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_gateway.DeletePartnerSSEGatewayRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
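+ # Long-running methods hand back an operation future built from the
+ # operations_pb2.Operation, not the raw proto itself.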
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_partner_sse_gateway_async_from_dict(): + await test_delete_partner_sse_gateway_async(request_type=dict) + + +def test_delete_partner_sse_gateway_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.DeletePartnerSSEGatewayRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_gateway), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_partner_sse_gateway_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.DeletePartnerSSEGatewayRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_gateway), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_partner_sse_gateway_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_partner_sse_gateway( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_partner_sse_gateway_flattened_error(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_partner_sse_gateway( + sse_gateway.DeletePartnerSSEGatewayRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_partner_sse_gateway_flattened_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_partner_sse_gateway( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_partner_sse_gateway_flattened_error_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_partner_sse_gateway( + sse_gateway.DeletePartnerSSEGatewayRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.UpdatePartnerSSEGatewayRequest, + dict, + ], +) +def test_update_partner_sse_gateway(request_type, transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_gateway.UpdatePartnerSSEGatewayRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_partner_sse_gateway_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_gateway.UpdatePartnerSSEGatewayRequest( + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_partner_sse_gateway), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_partner_sse_gateway(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_gateway.UpdatePartnerSSEGatewayRequest( + request_id="request_id_value", + ) + + +def test_update_partner_sse_gateway_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_partner_sse_gateway + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_partner_sse_gateway + ] = mock_rpc + request = {} + client.update_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_partner_sse_gateway_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_partner_sse_gateway + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_partner_sse_gateway + ] = mock_rpc + + request = {} + await client.update_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_partner_sse_gateway_async( + transport: str = "grpc_asyncio", + request_type=sse_gateway.UpdatePartnerSSEGatewayRequest, +): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_gateway.UpdatePartnerSSEGatewayRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_partner_sse_gateway_async_from_dict(): + await test_update_partner_sse_gateway_async(request_type=dict) + + +def test_update_partner_sse_gateway_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.UpdatePartnerSSEGatewayRequest() + + request.partner_sse_gateway.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_partner_sse_gateway), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "partner_sse_gateway.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_partner_sse_gateway_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.UpdatePartnerSSEGatewayRequest() + + request.partner_sse_gateway.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_partner_sse_gateway), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "partner_sse_gateway.name=name_value", + ) in kw["metadata"] + + +def test_update_partner_sse_gateway_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_partner_sse_gateway( + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].partner_sse_gateway + mock_val = sse_gateway.PartnerSSEGateway(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_partner_sse_gateway_flattened_error(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_partner_sse_gateway( + sse_gateway.UpdatePartnerSSEGatewayRequest(), + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_partner_sse_gateway_flattened_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_partner_sse_gateway( + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].partner_sse_gateway + mock_val = sse_gateway.PartnerSSEGateway(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_partner_sse_gateway_flattened_error_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_partner_sse_gateway( + sse_gateway.UpdatePartnerSSEGatewayRequest(), + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.ListSSEGatewayReferencesRequest, + dict, + ], +) +def test_list_sse_gateway_references(request_type, transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_gateway.ListSSEGatewayReferencesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_sse_gateway_references(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_gateway.ListSSEGatewayReferencesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSSEGatewayReferencesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_sse_gateway_references_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_gateway.ListSSEGatewayReferencesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_sse_gateway_references(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_gateway.ListSSEGatewayReferencesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_sse_gateway_references_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_sse_gateway_references + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_sse_gateway_references + ] = mock_rpc + request = {} + client.list_sse_gateway_references(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sse_gateway_references(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sse_gateway_references_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_sse_gateway_references + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sse_gateway_references + ] = mock_rpc + + request = {} + await client.list_sse_gateway_references(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_sse_gateway_references(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sse_gateway_references_async( + transport: str = "grpc_asyncio", + request_type=sse_gateway.ListSSEGatewayReferencesRequest, +): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.ListSSEGatewayReferencesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_sse_gateway_references(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_gateway.ListSSEGatewayReferencesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSSEGatewayReferencesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_sse_gateway_references_async_from_dict(): + await test_list_sse_gateway_references_async(request_type=dict) + + +def test_list_sse_gateway_references_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.ListSSEGatewayReferencesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + call.return_value = sse_gateway.ListSSEGatewayReferencesResponse() + client.list_sse_gateway_references(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_sse_gateway_references_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.ListSSEGatewayReferencesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.ListSSEGatewayReferencesResponse() + ) + await client.list_sse_gateway_references(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_sse_gateway_references_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = sse_gateway.ListSSEGatewayReferencesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_sse_gateway_references( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_sse_gateway_references_flattened_error(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sse_gateway_references( + sse_gateway.ListSSEGatewayReferencesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_sse_gateway_references_flattened_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_gateway.ListSSEGatewayReferencesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.ListSSEGatewayReferencesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_sse_gateway_references( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_sse_gateway_references_flattened_error_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_sse_gateway_references( + sse_gateway.ListSSEGatewayReferencesRequest(), + parent="parent_value", + ) + + +def test_list_sse_gateway_references_pager(transport_name: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + ], + next_page_token="abc", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[], + next_page_token="def", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + ], + next_page_token="ghi", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_sse_gateway_references( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, sse_gateway.SSEGatewayReference) for i in results) + + +def test_list_sse_gateway_references_pages(transport_name: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + ], + next_page_token="abc", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[], + next_page_token="def", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + ], + next_page_token="ghi", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sse_gateway_references(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_sse_gateway_references_async_pager(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + ], + next_page_token="abc", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[], + next_page_token="def", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + ], + next_page_token="ghi", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sse_gateway_references( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, sse_gateway.SSEGatewayReference) for i in responses) + + +@pytest.mark.asyncio +async def test_list_sse_gateway_references_async_pages(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + ], + next_page_token="abc", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[], + next_page_token="def", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + ], + next_page_token="ghi", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sse_gateway_references(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.GetSSEGatewayReferenceRequest, + dict, + ], +) +def test_get_sse_gateway_reference(request_type, transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sse_gateway_reference), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = sse_gateway.SSEGatewayReference( + name="name_value", + partner_sse_realm="partner_sse_realm_value", + prober_subnet_ranges=["prober_subnet_ranges_value"], + ) + response = client.get_sse_gateway_reference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_gateway.GetSSEGatewayReferenceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, sse_gateway.SSEGatewayReference) + assert response.name == "name_value" + assert response.partner_sse_realm == "partner_sse_realm_value" + assert response.prober_subnet_ranges == ["prober_subnet_ranges_value"] + + +def test_get_sse_gateway_reference_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_gateway.GetSSEGatewayReferenceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sse_gateway_reference), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_sse_gateway_reference(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_gateway.GetSSEGatewayReferenceRequest( + name="name_value", + ) + + +def test_get_sse_gateway_reference_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_sse_gateway_reference + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_sse_gateway_reference + ] = mock_rpc + request = {} + client.get_sse_gateway_reference(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_sse_gateway_reference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sse_gateway_reference_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_sse_gateway_reference + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_sse_gateway_reference + ] = mock_rpc + + request = {} + await client.get_sse_gateway_reference(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_sse_gateway_reference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sse_gateway_reference_async( + transport: str = "grpc_asyncio", + request_type=sse_gateway.GetSSEGatewayReferenceRequest, +): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sse_gateway_reference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.SSEGatewayReference( + name="name_value", + partner_sse_realm="partner_sse_realm_value", + prober_subnet_ranges=["prober_subnet_ranges_value"], + ) + ) + response = await client.get_sse_gateway_reference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_gateway.GetSSEGatewayReferenceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, sse_gateway.SSEGatewayReference) + assert response.name == "name_value" + assert response.partner_sse_realm == "partner_sse_realm_value" + assert response.prober_subnet_ranges == ["prober_subnet_ranges_value"] + + +@pytest.mark.asyncio +async def test_get_sse_gateway_reference_async_from_dict(): + await test_get_sse_gateway_reference_async(request_type=dict) + + +def test_get_sse_gateway_reference_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = sse_gateway.GetSSEGatewayReferenceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sse_gateway_reference), "__call__" + ) as call: + call.return_value = sse_gateway.SSEGatewayReference() + client.get_sse_gateway_reference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_sse_gateway_reference_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_gateway.GetSSEGatewayReferenceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sse_gateway_reference), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.SSEGatewayReference() + ) + await client.get_sse_gateway_reference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_sse_gateway_reference_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sse_gateway_reference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_gateway.SSEGatewayReference() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_sse_gateway_reference( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_sse_gateway_reference_flattened_error(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_sse_gateway_reference( + sse_gateway.GetSSEGatewayReferenceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_sse_gateway_reference_flattened_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sse_gateway_reference), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = sse_gateway.SSEGatewayReference() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.SSEGatewayReference() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_sse_gateway_reference( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_sse_gateway_reference_flattened_error_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_sse_gateway_reference( + sse_gateway.GetSSEGatewayReferenceRequest(), + name="name_value", + ) + + +def test_list_partner_sse_gateways_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_partner_sse_gateways + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_partner_sse_gateways + ] = mock_rpc + + request = {} + client.list_partner_sse_gateways(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_partner_sse_gateways(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_partner_sse_gateways_rest_required_fields( + request_type=sse_gateway.ListPartnerSSEGatewaysRequest, +): + transport_class = transports.SSEGatewayServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_partner_sse_gateways._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_partner_sse_gateways._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = sse_gateway.ListPartnerSSEGatewaysResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_gateway.ListPartnerSSEGatewaysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_partner_sse_gateways(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_partner_sse_gateways_rest_unset_required_fields(): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_partner_sse_gateways._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_partner_sse_gateways_rest_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = sse_gateway.ListPartnerSSEGatewaysResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = sse_gateway.ListPartnerSSEGatewaysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_partner_sse_gateways(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/partnerSSEGateways" + % client.transport._host, + args[1], + ) + + +def test_list_partner_sse_gateways_rest_flattened_error(transport: str = "rest"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_partner_sse_gateways( + sse_gateway.ListPartnerSSEGatewaysRequest(), + parent="parent_value", + ) + + +def test_list_partner_sse_gateways_rest_pager(transport: str = "rest"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + ], + next_page_token="abc", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[], + next_page_token="def", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + ], + next_page_token="ghi", + ), + sse_gateway.ListPartnerSSEGatewaysResponse( + partner_sse_gateways=[ + sse_gateway.PartnerSSEGateway(), + sse_gateway.PartnerSSEGateway(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + sse_gateway.ListPartnerSSEGatewaysResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_partner_sse_gateways(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, sse_gateway.PartnerSSEGateway) for i in results) + + pages = list(client.list_partner_sse_gateways(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_partner_sse_gateway_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_partner_sse_gateway + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_partner_sse_gateway + ] = mock_rpc + + request = {} + client.get_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_partner_sse_gateway_rest_required_fields( + request_type=sse_gateway.GetPartnerSSEGatewayRequest, +): + transport_class = transports.SSEGatewayServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_partner_sse_gateway._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_partner_sse_gateway._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = sse_gateway.PartnerSSEGateway() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_gateway.PartnerSSEGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_partner_sse_gateway(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_partner_sse_gateway_rest_unset_required_fields(): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_partner_sse_gateway._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_partner_sse_gateway_rest_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_gateway.PartnerSSEGateway() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/partnerSSEGateways/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = sse_gateway.PartnerSSEGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_partner_sse_gateway(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/partnerSSEGateways/*}" + % client.transport._host, + args[1], + ) + + +def test_get_partner_sse_gateway_rest_flattened_error(transport: str = "rest"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_partner_sse_gateway( + sse_gateway.GetPartnerSSEGatewayRequest(), + name="name_value", + ) + + +def test_create_partner_sse_gateway_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_partner_sse_gateway + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_partner_sse_gateway + ] = mock_rpc + + request = {} + client.create_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_partner_sse_gateway_rest_required_fields( + request_type=sse_gateway.CreatePartnerSSEGatewayRequest, +): + transport_class = transports.SSEGatewayServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["partner_sse_gateway_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "partnerSseGatewayId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_partner_sse_gateway._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "partnerSseGatewayId" in jsonified_request + assert ( + jsonified_request["partnerSseGatewayId"] + == request_init["partner_sse_gateway_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["partnerSseGatewayId"] = "partner_sse_gateway_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_partner_sse_gateway._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "partner_sse_gateway_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "partnerSseGatewayId" in jsonified_request + assert jsonified_request["partnerSseGatewayId"] == "partner_sse_gateway_id_value" + + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_partner_sse_gateway(request) + + expected_params = [ + ( + "partnerSseGatewayId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_partner_sse_gateway_rest_unset_required_fields(): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_partner_sse_gateway._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "partnerSseGatewayId", + "requestId", + ) + ) + & set( + ( + "parent", + "partnerSseGatewayId", + "partnerSseGateway", + ) + ) + ) + + +def test_create_partner_sse_gateway_rest_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + partner_sse_gateway_id="partner_sse_gateway_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_partner_sse_gateway(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/partnerSSEGateways" + % client.transport._host, + args[1], + ) + + +def test_create_partner_sse_gateway_rest_flattened_error(transport: str = "rest"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_partner_sse_gateway( + sse_gateway.CreatePartnerSSEGatewayRequest(), + parent="parent_value", + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + partner_sse_gateway_id="partner_sse_gateway_id_value", + ) + + +def test_delete_partner_sse_gateway_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_partner_sse_gateway + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_partner_sse_gateway + ] = mock_rpc + + request = {} + client.delete_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_partner_sse_gateway_rest_required_fields( + request_type=sse_gateway.DeletePartnerSSEGatewayRequest, +): + transport_class = transports.SSEGatewayServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_partner_sse_gateway._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_partner_sse_gateway._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_partner_sse_gateway(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_partner_sse_gateway_rest_unset_required_fields(): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_partner_sse_gateway._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_partner_sse_gateway_rest_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/partnerSSEGateways/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_partner_sse_gateway(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/partnerSSEGateways/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_partner_sse_gateway_rest_flattened_error(transport: str = "rest"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_partner_sse_gateway( + sse_gateway.DeletePartnerSSEGatewayRequest(), + name="name_value", + ) + + +def test_update_partner_sse_gateway_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_partner_sse_gateway + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_partner_sse_gateway + ] = mock_rpc + + request = {} + client.update_partner_sse_gateway(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_partner_sse_gateway(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_partner_sse_gateway_rest_required_fields( + request_type=sse_gateway.UpdatePartnerSSEGatewayRequest, +): + transport_class = transports.SSEGatewayServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_partner_sse_gateway._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_partner_sse_gateway._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_partner_sse_gateway(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_partner_sse_gateway_rest_unset_required_fields(): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_partner_sse_gateway._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("partnerSseGateway",)) + ) + + +def test_update_partner_sse_gateway_rest_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "partner_sse_gateway": { + "name": "projects/sample1/locations/sample2/partnerSSEGateways/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_partner_sse_gateway(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{partner_sse_gateway.name=projects/*/locations/*/partnerSSEGateways/*}" + % client.transport._host, + args[1], + ) + + +def test_update_partner_sse_gateway_rest_flattened_error(transport: str = "rest"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_partner_sse_gateway( + sse_gateway.UpdatePartnerSSEGatewayRequest(), + partner_sse_gateway=sse_gateway.PartnerSSEGateway(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_list_sse_gateway_references_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_sse_gateway_references + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_sse_gateway_references + ] = mock_rpc + + request = {} + client.list_sse_gateway_references(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sse_gateway_references(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_sse_gateway_references_rest_required_fields( + request_type=sse_gateway.ListSSEGatewayReferencesRequest, +): + transport_class = transports.SSEGatewayServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sse_gateway_references._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sse_gateway_references._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = sse_gateway.ListSSEGatewayReferencesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_gateway.ListSSEGatewayReferencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_sse_gateway_references(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_sse_gateway_references_rest_unset_required_fields(): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_sse_gateway_references._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_sse_gateway_references_rest_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_gateway.ListSSEGatewayReferencesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = sse_gateway.ListSSEGatewayReferencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_sse_gateway_references(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/sseGatewayReferences" + % client.transport._host, + args[1], + ) + + +def test_list_sse_gateway_references_rest_flattened_error(transport: str = "rest"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_sse_gateway_references( + sse_gateway.ListSSEGatewayReferencesRequest(), + parent="parent_value", + ) + + +def test_list_sse_gateway_references_rest_pager(transport: str = "rest"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + ], + next_page_token="abc", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[], + next_page_token="def", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + ], + next_page_token="ghi", + ), + sse_gateway.ListSSEGatewayReferencesResponse( + sse_gateway_references=[ + sse_gateway.SSEGatewayReference(), + sse_gateway.SSEGatewayReference(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + sse_gateway.ListSSEGatewayReferencesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_sse_gateway_references(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, sse_gateway.SSEGatewayReference) for i in results) + + pages = list(client.list_sse_gateway_references(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_sse_gateway_reference_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_sse_gateway_reference + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_sse_gateway_reference + ] = mock_rpc + + request = {} + client.get_sse_gateway_reference(request) + + # Establish that the underlying gRPC stub method was called. 
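+ # (The wrapped RPC was cached in client._transport._wrapped_methods at client
+ # construction and replaced with mock_rpc above, so the second call below is
+ # expected to reuse that cached entry without invoking wrap_method again.)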
+ assert mock_rpc.call_count == 1 + + client.get_sse_gateway_reference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_sse_gateway_reference_rest_required_fields( + request_type=sse_gateway.GetSSEGatewayReferenceRequest, +): + transport_class = transports.SSEGatewayServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_sse_gateway_reference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_sse_gateway_reference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = sse_gateway.SSEGatewayReference() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_gateway.SSEGatewayReference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_sse_gateway_reference(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_sse_gateway_reference_rest_unset_required_fields(): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_sse_gateway_reference._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_sse_gateway_reference_rest_flattened(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_gateway.SSEGatewayReference() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/sseGatewayReferences/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = sse_gateway.SSEGatewayReference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_sse_gateway_reference(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/sseGatewayReferences/*}" + % client.transport._host, + args[1], + ) + + +def test_get_sse_gateway_reference_rest_flattened_error(transport: str = "rest"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_sse_gateway_reference( + sse_gateway.GetSSEGatewayReferenceRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SSEGatewayServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SSEGatewayServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SSEGatewayServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SSEGatewayServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SSEGatewayServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SSEGatewayServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SSEGatewayServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SSEGatewayServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
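+ # (As test_credentials_transport_error above shows, a transport instance is
+ # mutually exclusive with credentials, credentials_file, scopes and api_key;
+ # the transport supplied here is used as-is.)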
+ transport = transports.SSEGatewayServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SSEGatewayServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SSEGatewayServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SSEGatewayServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SSEGatewayServiceGrpcTransport, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + transports.SSEGatewayServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = SSEGatewayServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_partner_sse_gateways_empty_call_grpc(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + call.return_value = sse_gateway.ListPartnerSSEGatewaysResponse() + client.list_partner_sse_gateways(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.ListPartnerSSEGatewaysRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_partner_sse_gateway_empty_call_grpc(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_gateway), "__call__" + ) as call: + call.return_value = sse_gateway.PartnerSSEGateway() + client.get_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.GetPartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_partner_sse_gateway_empty_call_grpc(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
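+ # (With request=None and no flattened fields, the client is expected to build
+ # a default CreatePartnerSSEGatewayRequest, which is what args[0] is compared
+ # against below.)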
+ with mock.patch.object( + type(client.transport.create_partner_sse_gateway), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.CreatePartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_partner_sse_gateway_empty_call_grpc(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_gateway), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.DeletePartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_partner_sse_gateway_empty_call_grpc(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_partner_sse_gateway), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.UpdatePartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sse_gateway_references_empty_call_grpc(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + call.return_value = sse_gateway.ListSSEGatewayReferencesResponse() + client.list_sse_gateway_references(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.ListSSEGatewayReferencesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_sse_gateway_reference_empty_call_grpc(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_sse_gateway_reference), "__call__" + ) as call: + call.return_value = sse_gateway.SSEGatewayReference() + client.get_sse_gateway_reference(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.GetSSEGatewayReferenceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = SSEGatewayServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_partner_sse_gateways_empty_call_grpc_asyncio(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.ListPartnerSSEGatewaysResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_partner_sse_gateways(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.ListPartnerSSEGatewaysRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_partner_sse_gateway_empty_call_grpc_asyncio(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.PartnerSSEGateway( + name="name_value", + sse_vpc_subnet_range="sse_vpc_subnet_range_value", + sse_vpc_target_ip="sse_vpc_target_ip_value", + sse_gateway_reference_id="sse_gateway_reference_id_value", + sse_bgp_ips=["sse_bgp_ips_value"], + sse_bgp_asn=1156, + partner_vpc_subnet_range="partner_vpc_subnet_range_value", + partner_sse_realm="partner_sse_realm_value", + sse_subnet_range="sse_subnet_range_value", + sse_target_ip="sse_target_ip_value", + partner_subnet_range="partner_subnet_range_value", + vni=333, + sse_project="sse_project_value", + sse_network="sse_network_value", + partner_sse_environment="partner_sse_environment_value", + country="country_value", + timezone="timezone_value", + capacity_bps=1266, + state=sse_gateway.PartnerSSEGateway.State.CUSTOMER_ATTACHED, + prober_subnet_ranges=["prober_subnet_ranges_value"], + ) + ) + await client.get_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.GetPartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_partner_sse_gateway_empty_call_grpc_asyncio(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.CreatePartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_partner_sse_gateway_empty_call_grpc_asyncio(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.DeletePartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_partner_sse_gateway_empty_call_grpc_asyncio(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_partner_sse_gateway), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.UpdatePartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_sse_gateway_references_empty_call_grpc_asyncio(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
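+ # (FakeUnaryUnaryCall is used here, as elsewhere in these tests, as an
+ # awaitable stand-in for a real unary-unary gRPC call wrapping the canned
+ # response.)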
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.ListSSEGatewayReferencesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_sse_gateway_references(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.ListSSEGatewayReferencesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_sse_gateway_reference_empty_call_grpc_asyncio(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_sse_gateway_reference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_gateway.SSEGatewayReference( + name="name_value", + partner_sse_realm="partner_sse_realm_value", + prober_subnet_ranges=["prober_subnet_ranges_value"], + ) + ) + await client.get_sse_gateway_reference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.GetSSEGatewayReferenceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = SSEGatewayServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_partner_sse_gateways_rest_bad_request( + request_type=sse_gateway.ListPartnerSSEGatewaysRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_partner_sse_gateways(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.ListPartnerSSEGatewaysRequest, + dict, + ], +) +def test_list_partner_sse_gateways_rest_call_success(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = sse_gateway.ListPartnerSSEGatewaysResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_gateway.ListPartnerSSEGatewaysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_partner_sse_gateways(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPartnerSSEGatewaysPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_partner_sse_gateways_rest_interceptors(null_interceptor): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSEGatewayServiceRestInterceptor(), + ) + client = SSEGatewayServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "post_list_partner_sse_gateways" + ) as post, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, + "post_list_partner_sse_gateways_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "pre_list_partner_sse_gateways" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_gateway.ListPartnerSSEGatewaysRequest.pb( + sse_gateway.ListPartnerSSEGatewaysRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = sse_gateway.ListPartnerSSEGatewaysResponse.to_json( + sse_gateway.ListPartnerSSEGatewaysResponse() + ) + req.return_value.content = return_value + + request = sse_gateway.ListPartnerSSEGatewaysRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = sse_gateway.ListPartnerSSEGatewaysResponse() + post_with_metadata.return_value = ( + sse_gateway.ListPartnerSSEGatewaysResponse(), + metadata, + ) + + client.list_partner_sse_gateways( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_partner_sse_gateway_rest_bad_request( + request_type=sse_gateway.GetPartnerSSEGatewayRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/partnerSSEGateways/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
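+ # (The REST transport is expected to surface the mocked 400 response as
+ # core_exceptions.BadRequest, which the pytest.raises context below asserts.)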
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_partner_sse_gateway(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.GetPartnerSSEGatewayRequest, + dict, + ], +) +def test_get_partner_sse_gateway_rest_call_success(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/partnerSSEGateways/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_gateway.PartnerSSEGateway( + name="name_value", + sse_vpc_subnet_range="sse_vpc_subnet_range_value", + sse_vpc_target_ip="sse_vpc_target_ip_value", + sse_gateway_reference_id="sse_gateway_reference_id_value", + sse_bgp_ips=["sse_bgp_ips_value"], + sse_bgp_asn=1156, + partner_vpc_subnet_range="partner_vpc_subnet_range_value", + partner_sse_realm="partner_sse_realm_value", + sse_subnet_range="sse_subnet_range_value", + sse_target_ip="sse_target_ip_value", + partner_subnet_range="partner_subnet_range_value", + vni=333, + sse_project="sse_project_value", + sse_network="sse_network_value", + partner_sse_environment="partner_sse_environment_value", + country="country_value", + timezone="timezone_value", + capacity_bps=1266, + state=sse_gateway.PartnerSSEGateway.State.CUSTOMER_ATTACHED, + prober_subnet_ranges=["prober_subnet_ranges_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_gateway.PartnerSSEGateway.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_partner_sse_gateway(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, sse_gateway.PartnerSSEGateway) + assert response.name == "name_value" + assert response.sse_vpc_subnet_range == "sse_vpc_subnet_range_value" + assert response.sse_vpc_target_ip == "sse_vpc_target_ip_value" + assert response.sse_gateway_reference_id == "sse_gateway_reference_id_value" + assert response.sse_bgp_ips == ["sse_bgp_ips_value"] + assert response.sse_bgp_asn == 1156 + assert response.partner_vpc_subnet_range == "partner_vpc_subnet_range_value" + assert response.partner_sse_realm == "partner_sse_realm_value" + assert response.sse_subnet_range == "sse_subnet_range_value" + assert response.sse_target_ip == "sse_target_ip_value" + assert response.partner_subnet_range == "partner_subnet_range_value" + assert response.vni == 333 + assert response.sse_project == "sse_project_value" + assert response.sse_network == "sse_network_value" + assert response.partner_sse_environment == "partner_sse_environment_value" + assert response.country == "country_value" + assert response.timezone == "timezone_value" + assert response.capacity_bps == 1266 + assert response.state == sse_gateway.PartnerSSEGateway.State.CUSTOMER_ATTACHED + assert response.prober_subnet_ranges == ["prober_subnet_ranges_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_partner_sse_gateway_rest_interceptors(null_interceptor): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSEGatewayServiceRestInterceptor(), + ) + client = SSEGatewayServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "post_get_partner_sse_gateway" + ) as post, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, + "post_get_partner_sse_gateway_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "pre_get_partner_sse_gateway" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_gateway.GetPartnerSSEGatewayRequest.pb( + sse_gateway.GetPartnerSSEGatewayRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = sse_gateway.PartnerSSEGateway.to_json( + sse_gateway.PartnerSSEGateway() + ) + req.return_value.content = return_value + + request = sse_gateway.GetPartnerSSEGatewayRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = sse_gateway.PartnerSSEGateway() + post_with_metadata.return_value = sse_gateway.PartnerSSEGateway(), metadata + + client.get_partner_sse_gateway( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_partner_sse_gateway_rest_bad_request( + request_type=sse_gateway.CreatePartnerSSEGatewayRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + 
request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_partner_sse_gateway(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.CreatePartnerSSEGatewayRequest, + dict, + ], +) +def test_create_partner_sse_gateway_rest_call_success(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["partner_sse_gateway"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "sse_vpc_subnet_range": "sse_vpc_subnet_range_value", + "sse_vpc_target_ip": "sse_vpc_target_ip_value", + "sse_gateway_reference_id": "sse_gateway_reference_id_value", + "sse_bgp_ips": ["sse_bgp_ips_value1", "sse_bgp_ips_value2"], + "sse_bgp_asn": 1156, + "partner_vpc_subnet_range": "partner_vpc_subnet_range_value", + "partner_sse_realm": "partner_sse_realm_value", + "sse_subnet_range": "sse_subnet_range_value", + "sse_target_ip": "sse_target_ip_value", + "partner_subnet_range": "partner_subnet_range_value", + "vni": 333, + "symantec_options": { + "symantec_location_uuid": "symantec_location_uuid_value", + "symantec_site_target_host": "symantec_site_target_host_value", + "symantec_site": "symantec_site_value", + }, + "sse_project": "sse_project_value", + "sse_network": "sse_network_value", + "partner_sse_environment": "partner_sse_environment_value", + "country": "country_value", + "timezone": "timezone_value", + "capacity_bps": 1266, + "state": 1, + "prober_subnet_ranges": [ + "prober_subnet_ranges_value1", + "prober_subnet_ranges_value2", + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = sse_gateway.CreatePartnerSSEGatewayRequest.meta.fields[ + "partner_sse_gateway" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
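+ # (Proto-plus message types expose their schema via message.meta.fields, while
+ # raw protobuf types expose it via message.DESCRIPTOR.fields; the branch below
+ # relies on that difference to enumerate nested fields.)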
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["partner_sse_gateway"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["partner_sse_gateway"][field])): + del request_init["partner_sse_gateway"][field][i][subfield] + else: + del request_init["partner_sse_gateway"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_partner_sse_gateway(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_partner_sse_gateway_rest_interceptors(null_interceptor): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSEGatewayServiceRestInterceptor(), + ) + client = SSEGatewayServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "post_create_partner_sse_gateway" + ) as post, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, + "post_create_partner_sse_gateway_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "pre_create_partner_sse_gateway" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_gateway.CreatePartnerSSEGatewayRequest.pb( + sse_gateway.CreatePartnerSSEGatewayRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = sse_gateway.CreatePartnerSSEGatewayRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_partner_sse_gateway( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_partner_sse_gateway_rest_bad_request( + request_type=sse_gateway.DeletePartnerSSEGatewayRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/partnerSSEGateways/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_partner_sse_gateway(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.DeletePartnerSSEGatewayRequest, + dict, + ], +) +def test_delete_partner_sse_gateway_rest_call_success(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/partnerSSEGateways/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_partner_sse_gateway(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_partner_sse_gateway_rest_interceptors(null_interceptor): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSEGatewayServiceRestInterceptor(), + ) + client = SSEGatewayServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "post_delete_partner_sse_gateway" + ) as post, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, + "post_delete_partner_sse_gateway_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "pre_delete_partner_sse_gateway" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_gateway.DeletePartnerSSEGatewayRequest.pb( + sse_gateway.DeletePartnerSSEGatewayRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = sse_gateway.DeletePartnerSSEGatewayRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_partner_sse_gateway( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_partner_sse_gateway_rest_bad_request( + request_type=sse_gateway.UpdatePartnerSSEGatewayRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "partner_sse_gateway": { + "name": "projects/sample1/locations/sample2/partnerSSEGateways/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_partner_sse_gateway(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.UpdatePartnerSSEGatewayRequest, + dict, + ], +) +def test_update_partner_sse_gateway_rest_call_success(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "partner_sse_gateway": { + "name": "projects/sample1/locations/sample2/partnerSSEGateways/sample3" + } + } + request_init["partner_sse_gateway"] = { + "name": "projects/sample1/locations/sample2/partnerSSEGateways/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "sse_vpc_subnet_range": "sse_vpc_subnet_range_value", + "sse_vpc_target_ip": "sse_vpc_target_ip_value", + "sse_gateway_reference_id": "sse_gateway_reference_id_value", + "sse_bgp_ips": ["sse_bgp_ips_value1", "sse_bgp_ips_value2"], + "sse_bgp_asn": 1156, + "partner_vpc_subnet_range": "partner_vpc_subnet_range_value", + "partner_sse_realm": "partner_sse_realm_value", + "sse_subnet_range": "sse_subnet_range_value", + "sse_target_ip": "sse_target_ip_value", + "partner_subnet_range": "partner_subnet_range_value", + "vni": 333, + "symantec_options": { + "symantec_location_uuid": "symantec_location_uuid_value", + "symantec_site_target_host": "symantec_site_target_host_value", + "symantec_site": "symantec_site_value", + }, + "sse_project": "sse_project_value", + "sse_network": "sse_network_value", + "partner_sse_environment": "partner_sse_environment_value", + "country": "country_value", + "timezone": "timezone_value", + "capacity_bps": 1266, + "state": 1, + "prober_subnet_ranges": [ + "prober_subnet_ranges_value1", + "prober_subnet_ranges_value2", + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = sse_gateway.UpdatePartnerSSEGatewayRequest.meta.fields[ + "partner_sse_gateway" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["partner_sse_gateway"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["partner_sse_gateway"][field])): + del request_init["partner_sse_gateway"][field][i][subfield] + else: + del request_init["partner_sse_gateway"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_partner_sse_gateway(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_partner_sse_gateway_rest_interceptors(null_interceptor): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSEGatewayServiceRestInterceptor(), + ) + client = SSEGatewayServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "post_update_partner_sse_gateway" + ) as post, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, + "post_update_partner_sse_gateway_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "pre_update_partner_sse_gateway" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_gateway.UpdatePartnerSSEGatewayRequest.pb( + sse_gateway.UpdatePartnerSSEGatewayRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = sse_gateway.UpdatePartnerSSEGatewayRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_partner_sse_gateway( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_sse_gateway_references_rest_bad_request( + request_type=sse_gateway.ListSSEGatewayReferencesRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_sse_gateway_references(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.ListSSEGatewayReferencesRequest, + dict, + ], +) +def test_list_sse_gateway_references_rest_call_success(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_gateway.ListSSEGatewayReferencesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_gateway.ListSSEGatewayReferencesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_sse_gateway_references(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSSEGatewayReferencesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_sse_gateway_references_rest_interceptors(null_interceptor): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSEGatewayServiceRestInterceptor(), + ) + client = SSEGatewayServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "post_list_sse_gateway_references" + ) as post, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, + "post_list_sse_gateway_references_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "pre_list_sse_gateway_references" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_gateway.ListSSEGatewayReferencesRequest.pb( + sse_gateway.ListSSEGatewayReferencesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = sse_gateway.ListSSEGatewayReferencesResponse.to_json( + sse_gateway.ListSSEGatewayReferencesResponse() + ) + req.return_value.content = return_value + + request = sse_gateway.ListSSEGatewayReferencesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = sse_gateway.ListSSEGatewayReferencesResponse() + post_with_metadata.return_value = ( + sse_gateway.ListSSEGatewayReferencesResponse(), + metadata, + ) + + client.list_sse_gateway_references( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_sse_gateway_reference_rest_bad_request( + request_type=sse_gateway.GetSSEGatewayReferenceRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/sseGatewayReferences/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_sse_gateway_reference(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_gateway.GetSSEGatewayReferenceRequest, + dict, + ], +) +def test_get_sse_gateway_reference_rest_call_success(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/sseGatewayReferences/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_gateway.SSEGatewayReference( + name="name_value", + partner_sse_realm="partner_sse_realm_value", + prober_subnet_ranges=["prober_subnet_ranges_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_gateway.SSEGatewayReference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_sse_gateway_reference(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, sse_gateway.SSEGatewayReference) + assert response.name == "name_value" + assert response.partner_sse_realm == "partner_sse_realm_value" + assert response.prober_subnet_ranges == ["prober_subnet_ranges_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_sse_gateway_reference_rest_interceptors(null_interceptor): + transport = transports.SSEGatewayServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSEGatewayServiceRestInterceptor(), + ) + client = SSEGatewayServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "post_get_sse_gateway_reference" + ) as post, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, + "post_get_sse_gateway_reference_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSEGatewayServiceRestInterceptor, "pre_get_sse_gateway_reference" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_gateway.GetSSEGatewayReferenceRequest.pb( + sse_gateway.GetSSEGatewayReferenceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = sse_gateway.SSEGatewayReference.to_json( + sse_gateway.SSEGatewayReference() + ) + req.return_value.content = return_value + + request = sse_gateway.GetSSEGatewayReferenceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = sse_gateway.SSEGatewayReference() + post_with_metadata.return_value = sse_gateway.SSEGatewayReference(), metadata + + client.get_sse_gateway_reference( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_partner_sse_gateways_empty_call_rest(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_gateways), "__call__" + ) as call: + client.list_partner_sse_gateways(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.ListPartnerSSEGatewaysRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_partner_sse_gateway_empty_call_rest(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_partner_sse_gateway), "__call__" + ) as call: + client.get_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.GetPartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_partner_sse_gateway_empty_call_rest(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_gateway), "__call__" + ) as call: + client.create_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.CreatePartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_partner_sse_gateway_empty_call_rest(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_gateway), "__call__" + ) as call: + client.delete_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.DeletePartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_partner_sse_gateway_empty_call_rest(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_partner_sse_gateway), "__call__" + ) as call: + client.update_partner_sse_gateway(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.UpdatePartnerSSEGatewayRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sse_gateway_references_empty_call_rest(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sse_gateway_references), "__call__" + ) as call: + client.list_sse_gateway_references(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.ListSSEGatewayReferencesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_sse_gateway_reference_empty_call_rest(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_sse_gateway_reference), "__call__" + ) as call: + client.get_sse_gateway_reference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_gateway.GetSSEGatewayReferenceRequest() + + assert args[0] == request_msg + + +def test_sse_gateway_service_rest_lro_client(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SSEGatewayServiceGrpcTransport, + ) + + +def test_sse_gateway_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SSEGatewayServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_sse_gateway_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.network_security_v1alpha1.services.sse_gateway_service.transports.SSEGatewayServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SSEGatewayServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_partner_sse_gateways", + "get_partner_sse_gateway", + "create_partner_sse_gateway", + "delete_partner_sse_gateway", + "update_partner_sse_gateway", + "list_sse_gateway_references", + "get_sse_gateway_reference", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_sse_gateway_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.network_security_v1alpha1.services.sse_gateway_service.transports.SSEGatewayServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SSEGatewayServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_sse_gateway_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.network_security_v1alpha1.services.sse_gateway_service.transports.SSEGatewayServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SSEGatewayServiceTransport() + adc.assert_called_once() + + +def test_sse_gateway_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SSEGatewayServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SSEGatewayServiceGrpcTransport, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + ], +) +def test_sse_gateway_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SSEGatewayServiceGrpcTransport, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + transports.SSEGatewayServiceRestTransport, + ], +) +def test_sse_gateway_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SSEGatewayServiceGrpcTransport, grpc_helpers), + (transports.SSEGatewayServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_sse_gateway_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SSEGatewayServiceGrpcTransport, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + ], +) +def test_sse_gateway_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_sse_gateway_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SSEGatewayServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_sse_gateway_service_host_no_port(transport_name): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_sse_gateway_service_host_with_port(transport_name): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_sse_gateway_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SSEGatewayServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SSEGatewayServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_partner_sse_gateways._session + session2 = client2.transport.list_partner_sse_gateways._session + assert session1 != session2 + session1 = client1.transport.get_partner_sse_gateway._session + session2 = client2.transport.get_partner_sse_gateway._session + assert session1 != session2 + session1 = client1.transport.create_partner_sse_gateway._session + session2 = client2.transport.create_partner_sse_gateway._session + assert session1 != session2 + session1 = client1.transport.delete_partner_sse_gateway._session + session2 = client2.transport.delete_partner_sse_gateway._session + assert session1 != session2 + session1 = client1.transport.update_partner_sse_gateway._session + session2 = client2.transport.update_partner_sse_gateway._session + assert session1 != session2 + session1 = client1.transport.list_sse_gateway_references._session + session2 = client2.transport.list_sse_gateway_references._session + assert session1 != session2 + session1 = client1.transport.get_sse_gateway_reference._session + session2 = 
client2.transport.get_sse_gateway_reference._session
+    assert session1 != session2
+
+
+def test_sse_gateway_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SSEGatewayServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_sse_gateway_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SSEGatewayServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.SSEGatewayServiceGrpcTransport,
+        transports.SSEGatewayServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_sse_gateway_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SSEGatewayServiceGrpcTransport, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + ], +) +def test_sse_gateway_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_sse_gateway_service_grpc_lro_client(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_sse_gateway_service_grpc_lro_async_client(): + client = SSEGatewayServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_partner_sse_gateway_path(): + project = "squid" + location = "clam" + partner_sse_gateway = "whelk" + expected = "projects/{project}/locations/{location}/partnerSSEGateways/{partner_sse_gateway}".format( + project=project, + location=location, + partner_sse_gateway=partner_sse_gateway, + ) + actual = SSEGatewayServiceClient.partner_sse_gateway_path( + project, location, partner_sse_gateway + ) + assert expected == actual + + +def test_parse_partner_sse_gateway_path(): + expected = { + "project": "octopus", + "location": "oyster", + "partner_sse_gateway": "nudibranch", + } + path = SSEGatewayServiceClient.partner_sse_gateway_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SSEGatewayServiceClient.parse_partner_sse_gateway_path(path) + assert expected == actual + + +def test_sse_gateway_reference_path(): + project = "cuttlefish" + location = "mussel" + sse_gateway_reference = "winkle" + expected = "projects/{project}/locations/{location}/sseGatewayReferences/{sse_gateway_reference}".format( + project=project, + location=location, + sse_gateway_reference=sse_gateway_reference, + ) + actual = SSEGatewayServiceClient.sse_gateway_reference_path( + project, location, sse_gateway_reference + ) + assert expected == actual + + +def test_parse_sse_gateway_reference_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "sse_gateway_reference": "abalone", + } + path = SSEGatewayServiceClient.sse_gateway_reference_path(**expected) + + # Check that the path construction is reversible. + actual = SSEGatewayServiceClient.parse_sse_gateway_reference_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SSEGatewayServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = SSEGatewayServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SSEGatewayServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SSEGatewayServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = SSEGatewayServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SSEGatewayServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SSEGatewayServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = SSEGatewayServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SSEGatewayServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = SSEGatewayServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = SSEGatewayServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SSEGatewayServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SSEGatewayServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = SSEGatewayServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SSEGatewayServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SSEGatewayServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SSEGatewayServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SSEGatewayServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. 
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.Location)
+
+
+def test_get_location_field_headers():
+    client = SSEGatewayServiceClient(credentials=ga_credentials.AnonymousCredentials())
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = SSEGatewayServiceAsyncClient(credentials=async_anonymous_credentials())
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = SSEGatewayServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = SSEGatewayServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = SSEGatewayServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = SSEGatewayServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SSEGatewayServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SSEGatewayServiceClient, transports.SSEGatewayServiceGrpcTransport), + ( + SSEGatewayServiceAsyncClient, + transports.SSEGatewayServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_sse_realm_service.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_sse_realm_service.py new file mode 100644 index 000000000000..4b482a1977c5 --- /dev/null +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1alpha1/test_sse_realm_service.py @@ -0,0 +1,14087 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1alpha1.services.sse_realm_service import ( + SSERealmServiceAsyncClient, + SSERealmServiceClient, + pagers, + transports, +) +from google.cloud.network_security_v1alpha1.types import common, sse_realm + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
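+
+# For reference: _get_default_mtls_endpoint, exercised further below, derives the
+# mTLS host by inserting ".mtls" into a *.googleapis.com endpoint (for example,
+# "example.googleapis.com" becomes "example.mtls.googleapis.com") and leaves
+# non-Google hosts untouched.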
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SSERealmServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SSERealmServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SSERealmServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SSERealmServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SSERealmServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SSERealmServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert SSERealmServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SSERealmServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SSERealmServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + SSERealmServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SSERealmServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SSERealmServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SSERealmServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SSERealmServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SSERealmServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert SSERealmServiceClient._get_client_cert_source(None, False) is None + 
assert ( + SSERealmServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + SSERealmServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + SSERealmServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + SSERealmServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + SSERealmServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSERealmServiceClient), +) +@mock.patch.object( + SSERealmServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSERealmServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SSERealmServiceClient._DEFAULT_UNIVERSE + default_endpoint = SSERealmServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SSERealmServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + SSERealmServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + SSERealmServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == SSERealmServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SSERealmServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + SSERealmServiceClient._get_api_endpoint(None, None, default_universe, "always") + == SSERealmServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SSERealmServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SSERealmServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SSERealmServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + SSERealmServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SSERealmServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SSERealmServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SSERealmServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SSERealmServiceClient._get_universe_domain(None, None) + == SSERealmServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SSERealmServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
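+
+# Universe-domain resolution, as exercised above: an explicit client option wins
+# over the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, which in turn wins
+# over the library default ("googleapis.com"); an empty string is rejected with
+# a ValueError.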
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SSERealmServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SSERealmServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SSERealmServiceClient, "grpc"), + (SSERealmServiceAsyncClient, "grpc_asyncio"), + (SSERealmServiceClient, "rest"), + ], +) +def test_sse_realm_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SSERealmServiceGrpcTransport, "grpc"), + (transports.SSERealmServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SSERealmServiceRestTransport, "rest"), + ], +) +def test_sse_realm_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SSERealmServiceClient, "grpc"), + (SSERealmServiceAsyncClient, "grpc_asyncio"), + (SSERealmServiceClient, "rest"), + ], +) +def test_sse_realm_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = 
creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +def test_sse_realm_service_client_get_transport_class(): + transport = SSERealmServiceClient.get_transport_class() + available_transports = [ + transports.SSERealmServiceGrpcTransport, + transports.SSERealmServiceRestTransport, + ] + assert transport in available_transports + + transport = SSERealmServiceClient.get_transport_class("grpc") + assert transport == transports.SSERealmServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SSERealmServiceClient, transports.SSERealmServiceGrpcTransport, "grpc"), + ( + SSERealmServiceAsyncClient, + transports.SSERealmServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SSERealmServiceClient, transports.SSERealmServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + SSERealmServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSERealmServiceClient), +) +@mock.patch.object( + SSERealmServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSERealmServiceAsyncClient), +) +def test_sse_realm_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SSERealmServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SSERealmServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
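+ # GOOGLE_API_USE_MTLS_ENDPOINT accepts "never" (always use the plain endpoint),
+ # "always" (always use the mTLS endpoint) and "auto" (use mTLS only when a
+ # client certificate is available); any other value raises
+ # MutualTLSChannelError, as the cases below verify.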
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + SSERealmServiceClient, + 
transports.SSERealmServiceGrpcTransport, + "grpc", + "true", + ), + ( + SSERealmServiceAsyncClient, + transports.SSERealmServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + SSERealmServiceClient, + transports.SSERealmServiceGrpcTransport, + "grpc", + "false", + ), + ( + SSERealmServiceAsyncClient, + transports.SSERealmServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + SSERealmServiceClient, + transports.SSERealmServiceRestTransport, + "rest", + "true", + ), + ( + SSERealmServiceClient, + transports.SSERealmServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + SSERealmServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSERealmServiceClient), +) +@mock.patch.object( + SSERealmServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSERealmServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_sse_realm_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SSERealmServiceClient, SSERealmServiceAsyncClient] +) +@mock.patch.object( + SSERealmServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SSERealmServiceClient), +) +@mock.patch.object( + SSERealmServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SSERealmServiceAsyncClient), +) +def test_sse_realm_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [SSERealmServiceClient, SSERealmServiceAsyncClient] +) +@mock.patch.object( + SSERealmServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSERealmServiceClient), +) +@mock.patch.object( + SSERealmServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SSERealmServiceAsyncClient), +) +def test_sse_realm_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SSERealmServiceClient._DEFAULT_UNIVERSE + default_endpoint = SSERealmServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SSERealmServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
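+ # Rough precedence exercised by this test: an explicit api_endpoint override
+ # always wins; otherwise GOOGLE_API_USE_MTLS_ENDPOINT ("never"/"always"/"auto")
+ # chooses between the regular and mTLS endpoints; otherwise the endpoint is
+ # built from the client's universe domain via _DEFAULT_ENDPOINT_TEMPLATE.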
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SSERealmServiceClient, transports.SSERealmServiceGrpcTransport, "grpc"), + ( + SSERealmServiceAsyncClient, + transports.SSERealmServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SSERealmServiceClient, transports.SSERealmServiceRestTransport, "rest"), + ], +) +def test_sse_realm_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SSERealmServiceClient, + transports.SSERealmServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SSERealmServiceAsyncClient, + transports.SSERealmServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (SSERealmServiceClient, transports.SSERealmServiceRestTransport, "rest", None), + ], +) +def test_sse_realm_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_sse_realm_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.network_security_v1alpha1.services.sse_realm_service.transports.SSERealmServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SSERealmServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SSERealmServiceClient, + transports.SSERealmServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SSERealmServiceAsyncClient, + transports.SSERealmServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_sse_realm_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
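+ # With credentials_file set, the transport is expected to load the file via
+ # google.auth.load_credentials_from_file and hand the resulting credentials to
+ # create_channel; the -1 values in the channel options asserted below remove
+ # gRPC's default limits on send/receive message sizes.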
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.ListSACRealmsRequest, + dict, + ], +) +def test_list_sac_realms(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.ListSACRealmsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_sac_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.ListSACRealmsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSACRealmsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_sac_realms_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
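+ # Background: fields that the API marks as auto-populated (typically a
+ # request_id constrained to be a UUID4) are filled in by the generated client
+ # when left unset; explicitly provided fields, such as the strings below, must
+ # reach the transport unchanged, which is what this test asserts.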
+ request = sse_realm.ListSACRealmsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_sac_realms(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.ListSACRealmsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_sac_realms_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sac_realms in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_sac_realms] = mock_rpc + request = {} + client.list_sac_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sac_realms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sac_realms_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_sac_realms + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sac_realms + ] = mock_rpc + + request = {} + await client.list_sac_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_sac_realms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sac_realms_async( + transport: str = "grpc_asyncio", request_type=sse_realm.ListSACRealmsRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListSACRealmsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_sac_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.ListSACRealmsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSACRealmsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_sac_realms_async_from_dict(): + await test_list_sac_realms_async(request_type=dict) + + +def test_list_sac_realms_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.ListSACRealmsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + call.return_value = sse_realm.ListSACRealmsResponse() + client.list_sac_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_sac_realms_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.ListSACRealmsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListSACRealmsResponse() + ) + await client.list_sac_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_sac_realms_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.ListSACRealmsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
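+ # Flattened arguments are a convenience layer: the client copies them into a
+ # fresh request object. Supplying both a request object and flattened keyword
+ # arguments is ambiguous and raises ValueError, as
+ # test_list_sac_realms_flattened_error checks below.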
+ client.list_sac_realms( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_sac_realms_flattened_error(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sac_realms( + sse_realm.ListSACRealmsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_sac_realms_flattened_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.ListSACRealmsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListSACRealmsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_sac_realms( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_sac_realms_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_sac_realms( + sse_realm.ListSACRealmsRequest(), + parent="parent_value", + ) + + +def test_list_sac_realms_pager(transport_name: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + # Set the response to a series of pages. 
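+ # The pager treats each mocked response as one page and keeps requesting the
+ # next page until next_page_token is empty, so iterating it flattens the four
+ # pages below (3 + 0 + 1 + 2 SACRealm items) into the six results asserted at
+ # the end of this test.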
+ call.side_effect = ( + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + sse_realm.SACRealm(), + sse_realm.SACRealm(), + ], + next_page_token="abc", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[], + next_page_token="def", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + ], + next_page_token="ghi", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + sse_realm.SACRealm(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_sac_realms(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, sse_realm.SACRealm) for i in results) + + +def test_list_sac_realms_pages(transport_name: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + sse_realm.SACRealm(), + sse_realm.SACRealm(), + ], + next_page_token="abc", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[], + next_page_token="def", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + ], + next_page_token="ghi", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + sse_realm.SACRealm(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sac_realms(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_sac_realms_async_pager(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_realms), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + sse_realm.SACRealm(), + sse_realm.SACRealm(), + ], + next_page_token="abc", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[], + next_page_token="def", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + ], + next_page_token="ghi", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + sse_realm.SACRealm(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sac_realms( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, sse_realm.SACRealm) for i in responses) + + +@pytest.mark.asyncio +async def test_list_sac_realms_async_pages(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_sac_realms), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + sse_realm.SACRealm(), + sse_realm.SACRealm(), + ], + next_page_token="abc", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[], + next_page_token="def", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + ], + next_page_token="ghi", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + sse_realm.SACRealm(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sac_realms(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.GetSACRealmRequest, + dict, + ], +) +def test_get_sac_realm(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.SACRealm( + name="name_value", + security_service=sse_realm.SACRealm.SecurityService.PALO_ALTO_PRISMA_ACCESS, + state=sse_realm.SACRealm.State.PENDING_PARTNER_ATTACHMENT, + ) + response = client.get_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.GetSACRealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, sse_realm.SACRealm) + assert response.name == "name_value" + assert ( + response.security_service + == sse_realm.SACRealm.SecurityService.PALO_ALTO_PRISMA_ACCESS + ) + assert response.state == sse_realm.SACRealm.State.PENDING_PARTNER_ATTACHMENT + + +def test_get_sac_realm_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_realm.GetSACRealmRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sac_realm), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_sac_realm(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.GetSACRealmRequest( + name="name_value", + ) + + +def test_get_sac_realm_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_sac_realm in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_sac_realm] = mock_rpc + request = {} + client.get_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_sac_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sac_realm_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_sac_realm + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_sac_realm + ] = mock_rpc + + request = {} + await client.get_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_sac_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sac_realm_async( + transport: str = "grpc_asyncio", request_type=sse_realm.GetSACRealmRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. 
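+ # FakeUnaryUnaryCall wraps the response message in an awaitable call
+ # object, so awaiting the mocked stub yields the SACRealm constructed
+ # below, roughly mirroring a real gRPC unary-unary call.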
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.SACRealm( + name="name_value", + security_service=sse_realm.SACRealm.SecurityService.PALO_ALTO_PRISMA_ACCESS, + state=sse_realm.SACRealm.State.PENDING_PARTNER_ATTACHMENT, + ) + ) + response = await client.get_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.GetSACRealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, sse_realm.SACRealm) + assert response.name == "name_value" + assert ( + response.security_service + == sse_realm.SACRealm.SecurityService.PALO_ALTO_PRISMA_ACCESS + ) + assert response.state == sse_realm.SACRealm.State.PENDING_PARTNER_ATTACHMENT + + +@pytest.mark.asyncio +async def test_get_sac_realm_async_from_dict(): + await test_get_sac_realm_async(request_type=dict) + + +def test_get_sac_realm_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.GetSACRealmRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sac_realm), "__call__") as call: + call.return_value = sse_realm.SACRealm() + client.get_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_sac_realm_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.GetSACRealmRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sac_realm), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(sse_realm.SACRealm()) + await client.get_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_sac_realm_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.SACRealm() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_sac_realm( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_sac_realm_flattened_error(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_sac_realm( + sse_realm.GetSACRealmRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_sac_realm_flattened_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.SACRealm() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(sse_realm.SACRealm()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_sac_realm( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_sac_realm_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_sac_realm( + sse_realm.GetSACRealmRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.CreateSACRealmRequest, + dict, + ], +) +def test_create_sac_realm(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.CreateSACRealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_sac_realm_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = sse_realm.CreateSACRealmRequest( + parent="parent_value", + sac_realm_id="sac_realm_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sac_realm), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_sac_realm(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.CreateSACRealmRequest( + parent="parent_value", + sac_realm_id="sac_realm_id_value", + ) + + +def test_create_sac_realm_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_sac_realm in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_sac_realm + ] = mock_rpc + request = {} + client.create_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_sac_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_sac_realm_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_sac_realm + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_sac_realm + ] = mock_rpc + + request = {} + await client.create_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_sac_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_sac_realm_async( + transport: str = "grpc_asyncio", request_type=sse_realm.CreateSACRealmRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.CreateSACRealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_sac_realm_async_from_dict(): + await test_create_sac_realm_async(request_type=dict) + + +def test_create_sac_realm_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.CreateSACRealmRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sac_realm), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_sac_realm_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.CreateSACRealmRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sac_realm), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
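+ # The client is expected to mirror request.parent into the
+ # x-goog-request-params metadata entry, which is used for request routing.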
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_sac_realm_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_sac_realm( + parent="parent_value", + sac_realm=sse_realm.SACRealm(name="name_value"), + sac_realm_id="sac_realm_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sac_realm + mock_val = sse_realm.SACRealm(name="name_value") + assert arg == mock_val + arg = args[0].sac_realm_id + mock_val = "sac_realm_id_value" + assert arg == mock_val + + +def test_create_sac_realm_flattened_error(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_sac_realm( + sse_realm.CreateSACRealmRequest(), + parent="parent_value", + sac_realm=sse_realm.SACRealm(name="name_value"), + sac_realm_id="sac_realm_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_sac_realm_flattened_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_sac_realm( + parent="parent_value", + sac_realm=sse_realm.SACRealm(name="name_value"), + sac_realm_id="sac_realm_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sac_realm + mock_val = sse_realm.SACRealm(name="name_value") + assert arg == mock_val + arg = args[0].sac_realm_id + mock_val = "sac_realm_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_sac_realm_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_sac_realm( + sse_realm.CreateSACRealmRequest(), + parent="parent_value", + sac_realm=sse_realm.SACRealm(name="name_value"), + sac_realm_id="sac_realm_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.DeleteSACRealmRequest, + dict, + ], +) +def test_delete_sac_realm(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.DeleteSACRealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_sac_realm_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_realm.DeleteSACRealmRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sac_realm), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_sac_realm(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.DeleteSACRealmRequest( + name="name_value", + ) + + +def test_delete_sac_realm_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_sac_realm in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_sac_realm + ] = mock_rpc + request = {} + client.delete_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_sac_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_sac_realm_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_sac_realm + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_sac_realm + ] = mock_rpc + + request = {} + await client.delete_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_sac_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_sac_realm_async( + transport: str = "grpc_asyncio", request_type=sse_realm.DeleteSACRealmRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.DeleteSACRealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_sac_realm_async_from_dict(): + await test_delete_sac_realm_async(request_type=dict) + + +def test_delete_sac_realm_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.DeleteSACRealmRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_sac_realm), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_sac_realm_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.DeleteSACRealmRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sac_realm), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_sac_realm_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_sac_realm( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_sac_realm_flattened_error(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_sac_realm( + sse_realm.DeleteSACRealmRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_sac_realm_flattened_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_sac_realm( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_sac_realm_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_sac_realm( + sse_realm.DeleteSACRealmRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.ListSACAttachmentsRequest, + dict, + ], +) +def test_list_sac_attachments(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.ListSACAttachmentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_sac_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.ListSACAttachmentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSACAttachmentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_sac_attachments_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_realm.ListSACAttachmentsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_sac_attachments(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.ListSACAttachmentsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_sac_attachments_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_sac_attachments in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_sac_attachments + ] = mock_rpc + request = {} + client.list_sac_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sac_attachments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sac_attachments_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_sac_attachments + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sac_attachments + ] = mock_rpc + + request = {} + await client.list_sac_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_sac_attachments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_sac_attachments_async( + transport: str = "grpc_asyncio", request_type=sse_realm.ListSACAttachmentsRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListSACAttachmentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_sac_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.ListSACAttachmentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSACAttachmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_sac_attachments_async_from_dict(): + await test_list_sac_attachments_async(request_type=dict) + + +def test_list_sac_attachments_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.ListSACAttachmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + call.return_value = sse_realm.ListSACAttachmentsResponse() + client.list_sac_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_sac_attachments_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.ListSACAttachmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListSACAttachmentsResponse() + ) + await client.list_sac_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_sac_attachments_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.ListSACAttachmentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_sac_attachments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_sac_attachments_flattened_error(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sac_attachments( + sse_realm.ListSACAttachmentsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_sac_attachments_flattened_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.ListSACAttachmentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListSACAttachmentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_sac_attachments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_sac_attachments_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_sac_attachments( + sse_realm.ListSACAttachmentsRequest(), + parent="parent_value", + ) + + +def test_list_sac_attachments_pager(transport_name: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + ], + next_page_token="abc", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[], + next_page_token="def", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + ], + next_page_token="ghi", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_sac_attachments(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, sse_realm.SACAttachment) for i in results) + + +def test_list_sac_attachments_pages(transport_name: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + ], + next_page_token="abc", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[], + next_page_token="def", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + ], + next_page_token="ghi", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sac_attachments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_sac_attachments_async_pager(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + ], + next_page_token="abc", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[], + next_page_token="def", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + ], + next_page_token="ghi", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sac_attachments( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, sse_realm.SACAttachment) for i in responses) + + +@pytest.mark.asyncio +async def test_list_sac_attachments_async_pages(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + ], + next_page_token="abc", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[], + next_page_token="def", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + ], + next_page_token="ghi", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sac_attachments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.GetSACAttachmentRequest, + dict, + ], +) +def test_get_sac_attachment(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.SACAttachment( + name="name_value", + sac_realm="sac_realm_value", + ncc_gateway="ncc_gateway_value", + country="country_value", + time_zone="time_zone_value", + state=sse_realm.SACAttachment.State.PENDING_PARTNER_ATTACHMENT, + ) + response = client.get_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.GetSACAttachmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, sse_realm.SACAttachment) + assert response.name == "name_value" + assert response.sac_realm == "sac_realm_value" + assert response.ncc_gateway == "ncc_gateway_value" + assert response.country == "country_value" + assert response.time_zone == "time_zone_value" + assert response.state == sse_realm.SACAttachment.State.PENDING_PARTNER_ATTACHMENT + + +def test_get_sac_attachment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_realm.GetSACAttachmentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sac_attachment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_sac_attachment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.GetSACAttachmentRequest( + name="name_value", + ) + + +def test_get_sac_attachment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_sac_attachment in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_sac_attachment + ] = mock_rpc + request = {} + client.get_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_sac_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sac_attachment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_sac_attachment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_sac_attachment + ] = mock_rpc + + request = {} + await client.get_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_sac_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sac_attachment_async( + transport: str = "grpc_asyncio", request_type=sse_realm.GetSACAttachmentRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.SACAttachment( + name="name_value", + sac_realm="sac_realm_value", + ncc_gateway="ncc_gateway_value", + country="country_value", + time_zone="time_zone_value", + state=sse_realm.SACAttachment.State.PENDING_PARTNER_ATTACHMENT, + ) + ) + response = await client.get_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.GetSACAttachmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, sse_realm.SACAttachment) + assert response.name == "name_value" + assert response.sac_realm == "sac_realm_value" + assert response.ncc_gateway == "ncc_gateway_value" + assert response.country == "country_value" + assert response.time_zone == "time_zone_value" + assert response.state == sse_realm.SACAttachment.State.PENDING_PARTNER_ATTACHMENT + + +@pytest.mark.asyncio +async def test_get_sac_attachment_async_from_dict(): + await test_get_sac_attachment_async(request_type=dict) + + +def test_get_sac_attachment_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = sse_realm.GetSACAttachmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sac_attachment), "__call__" + ) as call: + call.return_value = sse_realm.SACAttachment() + client.get_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_sac_attachment_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.GetSACAttachmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sac_attachment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.SACAttachment() + ) + await client.get_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_sac_attachment_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.SACAttachment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_sac_attachment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_sac_attachment_flattened_error(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_sac_attachment( + sse_realm.GetSACAttachmentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_sac_attachment_flattened_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.SACAttachment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.SACAttachment() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_sac_attachment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_sac_attachment_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_sac_attachment( + sse_realm.GetSACAttachmentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.CreateSACAttachmentRequest, + dict, + ], +) +def test_create_sac_attachment(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.CreateSACAttachmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_sac_attachment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_realm.CreateSACAttachmentRequest( + parent="parent_value", + sac_attachment_id="sac_attachment_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sac_attachment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_sac_attachment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.CreateSACAttachmentRequest( + parent="parent_value", + sac_attachment_id="sac_attachment_id_value", + ) + + +def test_create_sac_attachment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_sac_attachment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_sac_attachment + ] = mock_rpc + request = {} + client.create_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_sac_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_sac_attachment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_sac_attachment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_sac_attachment + ] = mock_rpc + + request = {} + await client.create_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_sac_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_sac_attachment_async( + transport: str = "grpc_asyncio", request_type=sse_realm.CreateSACAttachmentRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.CreateSACAttachmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_sac_attachment_async_from_dict(): + await test_create_sac_attachment_async(request_type=dict) + + +def test_create_sac_attachment_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.CreateSACAttachmentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sac_attachment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_sac_attachment_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.CreateSACAttachmentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sac_attachment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_sac_attachment_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_sac_attachment( + parent="parent_value", + sac_attachment=sse_realm.SACAttachment(name="name_value"), + sac_attachment_id="sac_attachment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sac_attachment + mock_val = sse_realm.SACAttachment(name="name_value") + assert arg == mock_val + arg = args[0].sac_attachment_id + mock_val = "sac_attachment_id_value" + assert arg == mock_val + + +def test_create_sac_attachment_flattened_error(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_sac_attachment( + sse_realm.CreateSACAttachmentRequest(), + parent="parent_value", + sac_attachment=sse_realm.SACAttachment(name="name_value"), + sac_attachment_id="sac_attachment_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_sac_attachment_flattened_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_sac_attachment( + parent="parent_value", + sac_attachment=sse_realm.SACAttachment(name="name_value"), + sac_attachment_id="sac_attachment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sac_attachment + mock_val = sse_realm.SACAttachment(name="name_value") + assert arg == mock_val + arg = args[0].sac_attachment_id + mock_val = "sac_attachment_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_sac_attachment_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_sac_attachment( + sse_realm.CreateSACAttachmentRequest(), + parent="parent_value", + sac_attachment=sse_realm.SACAttachment(name="name_value"), + sac_attachment_id="sac_attachment_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.DeleteSACAttachmentRequest, + dict, + ], +) +def test_delete_sac_attachment(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.DeleteSACAttachmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_sac_attachment_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_realm.DeleteSACAttachmentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sac_attachment), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_sac_attachment(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.DeleteSACAttachmentRequest( + name="name_value", + ) + + +def test_delete_sac_attachment_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_sac_attachment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_sac_attachment + ] = mock_rpc + request = {} + client.delete_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_sac_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_sac_attachment_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_sac_attachment + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_sac_attachment + ] = mock_rpc + + request = {} + await client.delete_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_sac_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_sac_attachment_async( + transport: str = "grpc_asyncio", request_type=sse_realm.DeleteSACAttachmentRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.DeleteSACAttachmentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_sac_attachment_async_from_dict(): + await test_delete_sac_attachment_async(request_type=dict) + + +def test_delete_sac_attachment_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = sse_realm.DeleteSACAttachmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sac_attachment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_sac_attachment_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.DeleteSACAttachmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sac_attachment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_sac_attachment_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_sac_attachment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_sac_attachment_flattened_error(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_sac_attachment( + sse_realm.DeleteSACAttachmentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_sac_attachment_flattened_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_sac_attachment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_sac_attachment_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_sac_attachment( + sse_realm.DeleteSACAttachmentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.ListPartnerSSERealmsRequest, + dict, + ], +) +def test_list_partner_sse_realms(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.ListPartnerSSERealmsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_partner_sse_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.ListPartnerSSERealmsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPartnerSSERealmsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_partner_sse_realms_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_realm.ListPartnerSSERealmsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_partner_sse_realms(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.ListPartnerSSERealmsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_partner_sse_realms_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_partner_sse_realms + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_partner_sse_realms + ] = mock_rpc + request = {} + client.list_partner_sse_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_partner_sse_realms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_partner_sse_realms_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_partner_sse_realms + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_partner_sse_realms + ] = mock_rpc + + request = {} + await client.list_partner_sse_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_partner_sse_realms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_partner_sse_realms_async( + transport: str = "grpc_asyncio", request_type=sse_realm.ListPartnerSSERealmsRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListPartnerSSERealmsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_partner_sse_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.ListPartnerSSERealmsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPartnerSSERealmsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_partner_sse_realms_async_from_dict(): + await test_list_partner_sse_realms_async(request_type=dict) + + +def test_list_partner_sse_realms_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.ListPartnerSSERealmsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + call.return_value = sse_realm.ListPartnerSSERealmsResponse() + client.list_partner_sse_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_partner_sse_realms_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.ListPartnerSSERealmsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListPartnerSSERealmsResponse() + ) + await client.list_partner_sse_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_partner_sse_realms_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.ListPartnerSSERealmsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_partner_sse_realms( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_partner_sse_realms_flattened_error(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_partner_sse_realms( + sse_realm.ListPartnerSSERealmsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_partner_sse_realms_flattened_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.ListPartnerSSERealmsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListPartnerSSERealmsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_partner_sse_realms( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_partner_sse_realms_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_partner_sse_realms( + sse_realm.ListPartnerSSERealmsRequest(), + parent="parent_value", + ) + + +def test_list_partner_sse_realms_pager(transport_name: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + ], + next_page_token="abc", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[], + next_page_token="def", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + ], + next_page_token="ghi", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_partner_sse_realms(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, sse_realm.PartnerSSERealm) for i in results) + + +def test_list_partner_sse_realms_pages(transport_name: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + ], + next_page_token="abc", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[], + next_page_token="def", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + ], + next_page_token="ghi", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + ], + ), + RuntimeError, + ) + pages = list(client.list_partner_sse_realms(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_partner_sse_realms_async_pager(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + ], + next_page_token="abc", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[], + next_page_token="def", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + ], + next_page_token="ghi", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_partner_sse_realms( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, sse_realm.PartnerSSERealm) for i in responses) + + +@pytest.mark.asyncio +async def test_list_partner_sse_realms_async_pages(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + ], + next_page_token="abc", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[], + next_page_token="def", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + ], + next_page_token="ghi", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_partner_sse_realms(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.GetPartnerSSERealmRequest, + dict, + ], +) +def test_get_partner_sse_realm(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = sse_realm.PartnerSSERealm( + name="name_value", + pairing_key="pairing_key_value", + partner_vpc="partner_vpc_value", + sse_vpc="sse_vpc_value", + sse_project="sse_project_value", + state=sse_realm.PartnerSSERealm.State.CUSTOMER_ATTACHED, + partner_network="partner_network_value", + sse_network="sse_network_value", + sse_project_number=1929, + ) + response = client.get_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.GetPartnerSSERealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, sse_realm.PartnerSSERealm) + assert response.name == "name_value" + assert response.pairing_key == "pairing_key_value" + assert response.partner_vpc == "partner_vpc_value" + assert response.sse_vpc == "sse_vpc_value" + assert response.sse_project == "sse_project_value" + assert response.state == sse_realm.PartnerSSERealm.State.CUSTOMER_ATTACHED + assert response.partner_network == "partner_network_value" + assert response.sse_network == "sse_network_value" + assert response.sse_project_number == 1929 + + +def test_get_partner_sse_realm_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_realm.GetPartnerSSERealmRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_realm), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_partner_sse_realm(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.GetPartnerSSERealmRequest( + name="name_value", + ) + + +def test_get_partner_sse_realm_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_partner_sse_realm + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_partner_sse_realm + ] = mock_rpc + request = {} + client.get_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_partner_sse_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_partner_sse_realm_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_partner_sse_realm + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_partner_sse_realm + ] = mock_rpc + + request = {} + await client.get_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_partner_sse_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_partner_sse_realm_async( + transport: str = "grpc_asyncio", request_type=sse_realm.GetPartnerSSERealmRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.PartnerSSERealm( + name="name_value", + pairing_key="pairing_key_value", + partner_vpc="partner_vpc_value", + sse_vpc="sse_vpc_value", + sse_project="sse_project_value", + state=sse_realm.PartnerSSERealm.State.CUSTOMER_ATTACHED, + partner_network="partner_network_value", + sse_network="sse_network_value", + sse_project_number=1929, + ) + ) + response = await client.get_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.GetPartnerSSERealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, sse_realm.PartnerSSERealm) + assert response.name == "name_value" + assert response.pairing_key == "pairing_key_value" + assert response.partner_vpc == "partner_vpc_value" + assert response.sse_vpc == "sse_vpc_value" + assert response.sse_project == "sse_project_value" + assert response.state == sse_realm.PartnerSSERealm.State.CUSTOMER_ATTACHED + assert response.partner_network == "partner_network_value" + assert response.sse_network == "sse_network_value" + assert response.sse_project_number == 1929 + + +@pytest.mark.asyncio +async def test_get_partner_sse_realm_async_from_dict(): + await test_get_partner_sse_realm_async(request_type=dict) + + +def test_get_partner_sse_realm_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.GetPartnerSSERealmRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_realm), "__call__" + ) as call: + call.return_value = sse_realm.PartnerSSERealm() + client.get_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_partner_sse_realm_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.GetPartnerSSERealmRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_realm), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.PartnerSSERealm() + ) + await client.get_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_partner_sse_realm_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.PartnerSSERealm() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_partner_sse_realm( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_partner_sse_realm_flattened_error(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_partner_sse_realm( + sse_realm.GetPartnerSSERealmRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_partner_sse_realm_flattened_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = sse_realm.PartnerSSERealm() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.PartnerSSERealm() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_partner_sse_realm( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_partner_sse_realm_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_partner_sse_realm( + sse_realm.GetPartnerSSERealmRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.CreatePartnerSSERealmRequest, + dict, + ], +) +def test_create_partner_sse_realm(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.CreatePartnerSSERealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_partner_sse_realm_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_realm.CreatePartnerSSERealmRequest( + parent="parent_value", + partner_sse_realm_id="partner_sse_realm_id_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_realm), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_partner_sse_realm(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.CreatePartnerSSERealmRequest( + parent="parent_value", + partner_sse_realm_id="partner_sse_realm_id_value", + request_id="request_id_value", + ) + + +def test_create_partner_sse_realm_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_partner_sse_realm + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_partner_sse_realm + ] = mock_rpc + request = {} + client.create_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_partner_sse_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_partner_sse_realm_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_partner_sse_realm + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_partner_sse_realm + ] = mock_rpc + + request = {} + await client.create_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_partner_sse_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_partner_sse_realm_async( + transport: str = "grpc_asyncio", request_type=sse_realm.CreatePartnerSSERealmRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.CreatePartnerSSERealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_partner_sse_realm_async_from_dict(): + await test_create_partner_sse_realm_async(request_type=dict) + + +def test_create_partner_sse_realm_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.CreatePartnerSSERealmRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_realm), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_partner_sse_realm_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.CreatePartnerSSERealmRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_realm), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_partner_sse_realm_flattened():
+    client = SSERealmServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_partner_sse_realm), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_partner_sse_realm(
+            parent="parent_value",
+            partner_sse_realm=sse_realm.PartnerSSERealm(name="name_value"),
+            partner_sse_realm_id="partner_sse_realm_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].partner_sse_realm
+        mock_val = sse_realm.PartnerSSERealm(name="name_value")
+        assert arg == mock_val
+        arg = args[0].partner_sse_realm_id
+        mock_val = "partner_sse_realm_id_value"
+        assert arg == mock_val
+
+
+def test_create_partner_sse_realm_flattened_error():
+    client = SSERealmServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_partner_sse_realm(
+            sse_realm.CreatePartnerSSERealmRequest(),
+            parent="parent_value",
+            partner_sse_realm=sse_realm.PartnerSSERealm(name="name_value"),
+            partner_sse_realm_id="partner_sse_realm_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_partner_sse_realm_flattened_async():
+    client = SSERealmServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_partner_sse_realm), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_partner_sse_realm(
+            parent="parent_value",
+            partner_sse_realm=sse_realm.PartnerSSERealm(name="name_value"),
+            partner_sse_realm_id="partner_sse_realm_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].partner_sse_realm + mock_val = sse_realm.PartnerSSERealm(name="name_value") + assert arg == mock_val + arg = args[0].partner_sse_realm_id + mock_val = "partner_sse_realm_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_partner_sse_realm_flattened_error_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_partner_sse_realm( + sse_realm.CreatePartnerSSERealmRequest(), + parent="parent_value", + partner_sse_realm=sse_realm.PartnerSSERealm(name="name_value"), + partner_sse_realm_id="partner_sse_realm_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.DeletePartnerSSERealmRequest, + dict, + ], +) +def test_delete_partner_sse_realm(request_type, transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = sse_realm.DeletePartnerSSERealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_partner_sse_realm_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = sse_realm.DeletePartnerSSERealmRequest( + name="name_value", + request_id="request_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_realm), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_partner_sse_realm(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == sse_realm.DeletePartnerSSERealmRequest( + name="name_value", + request_id="request_id_value", + ) + + +def test_delete_partner_sse_realm_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_partner_sse_realm + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_partner_sse_realm + ] = mock_rpc + request = {} + client.delete_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_partner_sse_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_partner_sse_realm_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_partner_sse_realm + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_partner_sse_realm + ] = mock_rpc + + request = {} + await client.delete_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_partner_sse_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_partner_sse_realm_async( + transport: str = "grpc_asyncio", request_type=sse_realm.DeletePartnerSSERealmRequest +): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = sse_realm.DeletePartnerSSERealmRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_partner_sse_realm_async_from_dict(): + await test_delete_partner_sse_realm_async(request_type=dict) + + +def test_delete_partner_sse_realm_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.DeletePartnerSSERealmRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_realm), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_partner_sse_realm_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = sse_realm.DeletePartnerSSERealmRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_realm), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_partner_sse_realm_flattened():
+    client = SSERealmServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_partner_sse_realm), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_partner_sse_realm(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_partner_sse_realm_flattened_error():
+    client = SSERealmServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_partner_sse_realm(
+            sse_realm.DeletePartnerSSERealmRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_partner_sse_realm_flattened_async():
+    client = SSERealmServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_partner_sse_realm), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_partner_sse_realm(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_partner_sse_realm_flattened_error_async():
+    client = SSERealmServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_partner_sse_realm( + sse_realm.DeletePartnerSSERealmRequest(), + name="name_value", + ) + + +def test_list_sac_realms_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sac_realms in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_sac_realms] = mock_rpc + + request = {} + client.list_sac_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sac_realms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_sac_realms_rest_required_fields( + request_type=sse_realm.ListSACRealmsRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sac_realms._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sac_realms._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = sse_realm.ListSACRealmsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.ListSACRealmsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_sac_realms(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_sac_realms_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_sac_realms._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_sac_realms_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_realm.ListSACRealmsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = sse_realm.ListSACRealmsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_sac_realms(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/sacRealms" + % client.transport._host, + args[1], + ) + + +def test_list_sac_realms_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sac_realms( + sse_realm.ListSACRealmsRequest(), + parent="parent_value", + ) + + +def test_list_sac_realms_rest_pager(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + sse_realm.SACRealm(), + sse_realm.SACRealm(), + ], + next_page_token="abc", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[], + next_page_token="def", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + ], + next_page_token="ghi", + ), + sse_realm.ListSACRealmsResponse( + sac_realms=[ + sse_realm.SACRealm(), + sse_realm.SACRealm(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(sse_realm.ListSACRealmsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_sac_realms(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, sse_realm.SACRealm) for i in results) + + pages = list(client.list_sac_realms(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_sac_realm_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_sac_realm in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_sac_realm] = mock_rpc + + request = {} + client.get_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_sac_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_sac_realm_rest_required_fields(request_type=sse_realm.GetSACRealmRequest): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_sac_realm._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_sac_realm._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = sse_realm.SACRealm() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.SACRealm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_sac_realm(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_sac_realm_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_sac_realm._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_sac_realm_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = sse_realm.SACRealm() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/sacRealms/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = sse_realm.SACRealm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_sac_realm(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/sacRealms/*}" + % client.transport._host, + args[1], + ) + + +def test_get_sac_realm_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_sac_realm( + sse_realm.GetSACRealmRequest(), + name="name_value", + ) + + +def test_create_sac_realm_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_sac_realm in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_sac_realm + ] = mock_rpc + + request = {} + client.create_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_sac_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_sac_realm_rest_required_fields( + request_type=sse_realm.CreateSACRealmRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["sac_realm_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "sacRealmId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_sac_realm._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "sacRealmId" in jsonified_request + assert jsonified_request["sacRealmId"] == request_init["sac_realm_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["sacRealmId"] = "sac_realm_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_sac_realm._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "sac_realm_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "sacRealmId" in jsonified_request + assert jsonified_request["sacRealmId"] == "sac_realm_id_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_sac_realm(request) + + expected_params = [ + ( + "sacRealmId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_sac_realm_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_sac_realm._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "sacRealmId", + ) + ) + & set( + ( + "parent", + "sacRealmId", + "sacRealm", + ) + ) + ) + + +def test_create_sac_realm_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + sac_realm=sse_realm.SACRealm(name="name_value"), + sac_realm_id="sac_realm_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_sac_realm(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/sacRealms" + % client.transport._host, + args[1], + ) + + +def test_create_sac_realm_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_sac_realm( + sse_realm.CreateSACRealmRequest(), + parent="parent_value", + sac_realm=sse_realm.SACRealm(name="name_value"), + sac_realm_id="sac_realm_id_value", + ) + + +def test_delete_sac_realm_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_sac_realm in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_sac_realm + ] = mock_rpc + + request = {} + client.delete_sac_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_sac_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_sac_realm_rest_required_fields( + request_type=sse_realm.DeleteSACRealmRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_sac_realm._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_sac_realm._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_sac_realm(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_sac_realm_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_sac_realm._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_sac_realm_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/sacRealms/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_sac_realm(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/sacRealms/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_sac_realm_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_sac_realm( + sse_realm.DeleteSACRealmRequest(), + name="name_value", + ) + + +def test_list_sac_attachments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_sac_attachments in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_sac_attachments + ] = mock_rpc + + request = {} + client.list_sac_attachments(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sac_attachments(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_sac_attachments_rest_required_fields( + request_type=sse_realm.ListSACAttachmentsRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sac_attachments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sac_attachments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = sse_realm.ListSACAttachmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.ListSACAttachmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_sac_attachments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_sac_attachments_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_sac_attachments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_sac_attachments_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_realm.ListSACAttachmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = sse_realm.ListSACAttachmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_sac_attachments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/sacAttachments" + % client.transport._host, + args[1], + ) + + +def test_list_sac_attachments_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sac_attachments( + sse_realm.ListSACAttachmentsRequest(), + parent="parent_value", + ) + + +def test_list_sac_attachments_rest_pager(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + ], + next_page_token="abc", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[], + next_page_token="def", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + ], + next_page_token="ghi", + ), + sse_realm.ListSACAttachmentsResponse( + sac_attachments=[ + sse_realm.SACAttachment(), + sse_realm.SACAttachment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + sse_realm.ListSACAttachmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_sac_attachments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, sse_realm.SACAttachment) for i in results) + + pages = list(client.list_sac_attachments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_sac_attachment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_sac_attachment in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_sac_attachment + ] = mock_rpc + + request = {} + client.get_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_sac_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_sac_attachment_rest_required_fields( + request_type=sse_realm.GetSACAttachmentRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_sac_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_sac_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = sse_realm.SACAttachment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.SACAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_sac_attachment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_sac_attachment_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_sac_attachment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_sac_attachment_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
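+ # An empty SACAttachment is enough here; this flattened-call test only
+ # cares about the URL the call produces, which is validated against the
+ # http rule template at the end of the test.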
+ return_value = sse_realm.SACAttachment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/sacAttachments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = sse_realm.SACAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_sac_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/sacAttachments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_sac_attachment_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_sac_attachment( + sse_realm.GetSACAttachmentRequest(), + name="name_value", + ) + + +def test_create_sac_attachment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_sac_attachment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_sac_attachment + ] = mock_rpc + + request = {} + client.create_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_sac_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_sac_attachment_rest_required_fields( + request_type=sse_realm.CreateSACAttachmentRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["sac_attachment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "sacAttachmentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_sac_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "sacAttachmentId" in jsonified_request + assert jsonified_request["sacAttachmentId"] == request_init["sac_attachment_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["sacAttachmentId"] = "sac_attachment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_sac_attachment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "sac_attachment_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "sacAttachmentId" in jsonified_request + assert jsonified_request["sacAttachmentId"] == "sac_attachment_id_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
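+ # The stubbed transcode result below uses a placeholder URI, so only
+ # the serialized query parameters are exercised by this test.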
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_sac_attachment(request) + + expected_params = [ + ( + "sacAttachmentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_sac_attachment_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_sac_attachment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "sacAttachmentId", + ) + ) + & set( + ( + "parent", + "sacAttachmentId", + "sacAttachment", + ) + ) + ) + + +def test_create_sac_attachment_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + sac_attachment=sse_realm.SACAttachment(name="name_value"), + sac_attachment_id="sac_attachment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_sac_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/sacAttachments" + % client.transport._host, + args[1], + ) + + +def test_create_sac_attachment_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_sac_attachment( + sse_realm.CreateSACAttachmentRequest(), + parent="parent_value", + sac_attachment=sse_realm.SACAttachment(name="name_value"), + sac_attachment_id="sac_attachment_id_value", + ) + + +def test_delete_sac_attachment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_sac_attachment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_sac_attachment + ] = mock_rpc + + request = {} + client.delete_sac_attachment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_sac_attachment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_sac_attachment_rest_required_fields( + request_type=sse_realm.DeleteSACAttachmentRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_sac_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_sac_attachment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_sac_attachment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_sac_attachment_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_sac_attachment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_sac_attachment_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/sacAttachments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_sac_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/sacAttachments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_sac_attachment_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_sac_attachment( + sse_realm.DeleteSACAttachmentRequest(), + name="name_value", + ) + + +def test_list_partner_sse_realms_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_partner_sse_realms + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_partner_sse_realms + ] = mock_rpc + + request = {} + client.list_partner_sse_realms(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_partner_sse_realms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_partner_sse_realms_rest_required_fields( + request_type=sse_realm.ListPartnerSSERealmsRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_partner_sse_realms._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_partner_sse_realms._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = sse_realm.ListPartnerSSERealmsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.ListPartnerSSERealmsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_partner_sse_realms(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_partner_sse_realms_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_partner_sse_realms._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_partner_sse_realms_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_realm.ListPartnerSSERealmsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = sse_realm.ListPartnerSSERealmsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_partner_sse_realms(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/partnerSSERealms" + % client.transport._host, + args[1], + ) + + +def test_list_partner_sse_realms_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_partner_sse_realms( + sse_realm.ListPartnerSSERealmsRequest(), + parent="parent_value", + ) + + +def test_list_partner_sse_realms_rest_pager(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + ], + next_page_token="abc", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[], + next_page_token="def", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + ], + next_page_token="ghi", + ), + sse_realm.ListPartnerSSERealmsResponse( + partner_sse_realms=[ + sse_realm.PartnerSSERealm(), + sse_realm.PartnerSSERealm(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + sse_realm.ListPartnerSSERealmsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_partner_sse_realms(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, sse_realm.PartnerSSERealm) for i in results) + + pages = list(client.list_partner_sse_realms(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_partner_sse_realm_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_partner_sse_realm + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_partner_sse_realm + ] = mock_rpc + + request = {} + client.get_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. 
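+ # After this first call, the second invocation below must reuse the cached
+ # wrapper: wrap_method should see no further calls even though the mocked
+ # rpc fires twice.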
+ assert mock_rpc.call_count == 1 + + client.get_partner_sse_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_partner_sse_realm_rest_required_fields( + request_type=sse_realm.GetPartnerSSERealmRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_partner_sse_realm._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_partner_sse_realm._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = sse_realm.PartnerSSERealm() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.PartnerSSERealm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_partner_sse_realm(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_partner_sse_realm_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_partner_sse_realm._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_partner_sse_realm_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_realm.PartnerSSERealm() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/partnerSSERealms/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = sse_realm.PartnerSSERealm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_partner_sse_realm(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/partnerSSERealms/*}" + % client.transport._host, + args[1], + ) + + +def test_get_partner_sse_realm_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_partner_sse_realm( + sse_realm.GetPartnerSSERealmRequest(), + name="name_value", + ) + + +def test_create_partner_sse_realm_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_partner_sse_realm + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_partner_sse_realm + ] = mock_rpc + + request = {} + client.create_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_partner_sse_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_partner_sse_realm_rest_required_fields( + request_type=sse_realm.CreatePartnerSSERealmRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["partner_sse_realm_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "partnerSseRealmId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_partner_sse_realm._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "partnerSseRealmId" in jsonified_request + assert ( + jsonified_request["partnerSseRealmId"] == request_init["partner_sse_realm_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["partnerSseRealmId"] = "partner_sse_realm_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_partner_sse_realm._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "partner_sse_realm_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "partnerSseRealmId" in jsonified_request + assert jsonified_request["partnerSseRealmId"] == "partner_sse_realm_id_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
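+ # This POST-style stub also sets a request body on the transcode result,
+ # so the body-serialization path of the REST transport is exercised too.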
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_partner_sse_realm(request) + + expected_params = [ + ( + "partnerSseRealmId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_partner_sse_realm_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_partner_sse_realm._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "partnerSseRealmId", + "requestId", + ) + ) + & set( + ( + "parent", + "partnerSseRealmId", + "partnerSseRealm", + ) + ) + ) + + +def test_create_partner_sse_realm_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + partner_sse_realm=sse_realm.PartnerSSERealm(name="name_value"), + partner_sse_realm_id="partner_sse_realm_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_partner_sse_realm(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/partnerSSERealms" + % client.transport._host, + args[1], + ) + + +def test_create_partner_sse_realm_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_partner_sse_realm( + sse_realm.CreatePartnerSSERealmRequest(), + parent="parent_value", + partner_sse_realm=sse_realm.PartnerSSERealm(name="name_value"), + partner_sse_realm_id="partner_sse_realm_id_value", + ) + + +def test_delete_partner_sse_realm_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_partner_sse_realm + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_partner_sse_realm + ] = mock_rpc + + request = {} + client.delete_partner_sse_realm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_partner_sse_realm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_partner_sse_realm_rest_required_fields( + request_type=sse_realm.DeletePartnerSSERealmRequest, +): + transport_class = transports.SSERealmServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_partner_sse_realm._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_partner_sse_realm._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_partner_sse_realm(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_partner_sse_realm_rest_unset_required_fields(): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_partner_sse_realm._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_partner_sse_realm_rest_flattened(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/partnerSSERealms/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_partner_sse_realm(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/partnerSSERealms/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_partner_sse_realm_rest_flattened_error(transport: str = "rest"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_partner_sse_realm( + sse_realm.DeletePartnerSSERealmRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SSERealmServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.SSERealmServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SSERealmServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SSERealmServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SSERealmServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SSERealmServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SSERealmServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SSERealmServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SSERealmServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SSERealmServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SSERealmServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SSERealmServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SSERealmServiceGrpcTransport, + transports.SSERealmServiceGrpcAsyncIOTransport, + transports.SSERealmServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = SSERealmServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sac_realms_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + call.return_value = sse_realm.ListSACRealmsResponse() + client.list_sac_realms(request=None) + + # Establish that the underlying stub method was called. 
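+ # With request=None the client is expected to construct a default, empty
+ # ListSACRealmsRequest and pass it through to the stub unchanged.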
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.ListSACRealmsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_sac_realm_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_sac_realm), "__call__") as call: + call.return_value = sse_realm.SACRealm() + client.get_sac_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.GetSACRealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_sac_realm_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_sac_realm), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_sac_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.CreateSACRealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_sac_realm_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_sac_realm), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_sac_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.DeleteSACRealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sac_attachments_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + call.return_value = sse_realm.ListSACAttachmentsResponse() + client.list_sac_attachments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.ListSACAttachmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_sac_attachment_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_sac_attachment), "__call__" + ) as call: + call.return_value = sse_realm.SACAttachment() + client.get_sac_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.GetSACAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_sac_attachment_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_sac_attachment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_sac_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.CreateSACAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_sac_attachment_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_sac_attachment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_sac_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.DeleteSACAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_partner_sse_realms_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + call.return_value = sse_realm.ListPartnerSSERealmsResponse() + client.list_partner_sse_realms(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.ListPartnerSSERealmsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_partner_sse_realm_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_realm), "__call__" + ) as call: + call.return_value = sse_realm.PartnerSSERealm() + client.get_partner_sse_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.GetPartnerSSERealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_create_partner_sse_realm_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_realm), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_partner_sse_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.CreatePartnerSSERealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_partner_sse_realm_empty_call_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_realm), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_partner_sse_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.DeletePartnerSSERealmRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = SSERealmServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_sac_realms_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListSACRealmsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_sac_realms(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.ListSACRealmsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_sac_realm_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.SACRealm( + name="name_value", + security_service=sse_realm.SACRealm.SecurityService.PALO_ALTO_PRISMA_ACCESS, + state=sse_realm.SACRealm.State.PENDING_PARTNER_ATTACHMENT, + ) + ) + await client.get_sac_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.GetSACRealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_sac_realm_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_sac_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.CreateSACRealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_sac_realm_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_sac_realm), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_sac_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.DeleteSACRealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_sac_attachments_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListSACAttachmentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_sac_attachments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.ListSACAttachmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
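+# With request=None and no flattened fields, the client builds a
+# default-constructed request before invoking the transport, e.g. roughly:
+#
+#     await client.get_sac_attachment(request=None)
+#     # -> the stub receives sse_realm.GetSACAttachmentRequest()
+#
+# which is what the `args[0] == request_msg` assertion below checks.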
+@pytest.mark.asyncio +async def test_get_sac_attachment_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.SACAttachment( + name="name_value", + sac_realm="sac_realm_value", + ncc_gateway="ncc_gateway_value", + country="country_value", + time_zone="time_zone_value", + state=sse_realm.SACAttachment.State.PENDING_PARTNER_ATTACHMENT, + ) + ) + await client.get_sac_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.GetSACAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_sac_attachment_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_sac_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.CreateSACAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_sac_attachment_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_sac_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_sac_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.DeleteSACAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_partner_sse_realms_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.ListPartnerSSERealmsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_partner_sse_realms(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.ListPartnerSSERealmsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_partner_sse_realm_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + sse_realm.PartnerSSERealm( + name="name_value", + pairing_key="pairing_key_value", + partner_vpc="partner_vpc_value", + sse_vpc="sse_vpc_value", + sse_project="sse_project_value", + state=sse_realm.PartnerSSERealm.State.CUSTOMER_ATTACHED, + partner_network="partner_network_value", + sse_network="sse_network_value", + sse_project_number=1929, + ) + ) + await client.get_partner_sse_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.GetPartnerSSERealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_partner_sse_realm_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_partner_sse_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.CreatePartnerSSERealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_partner_sse_realm_empty_call_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_realm), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_partner_sse_realm(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.DeletePartnerSSERealmRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = SSERealmServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_sac_realms_rest_bad_request(request_type=sse_realm.ListSACRealmsRequest): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_sac_realms(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.ListSACRealmsRequest, + dict, + ], +) +def test_list_sac_realms_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_realm.ListSACRealmsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.ListSACRealmsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_sac_realms(request) + + # Establish that the response is the type that we expect. 
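+    # The REST client wraps the raw ListSACRealmsResponse in a
+    # ListSACRealmsPager; the pager proxies attribute access to the
+    # underlying response, so next_page_token and unreachable remain
+    # visible here.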
+ assert isinstance(response, pagers.ListSACRealmsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_sac_realms_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_list_sac_realms" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_list_sac_realms_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_list_sac_realms" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.ListSACRealmsRequest.pb(sse_realm.ListSACRealmsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = sse_realm.ListSACRealmsResponse.to_json( + sse_realm.ListSACRealmsResponse() + ) + req.return_value.content = return_value + + request = sse_realm.ListSACRealmsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = sse_realm.ListSACRealmsResponse() + post_with_metadata.return_value = sse_realm.ListSACRealmsResponse(), metadata + + client.list_sac_realms( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_sac_realm_rest_bad_request(request_type=sse_realm.GetSACRealmRequest): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sacRealms/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_sac_realm(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.GetSACRealmRequest, + dict, + ], +) +def test_get_sac_realm_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sacRealms/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
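+    # A mocked 400 from Session.request is surfaced by the REST transport as
+    # core_exceptions.BadRequest, which the pytest.raises context below
+    # expects.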
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_realm.SACRealm( + name="name_value", + security_service=sse_realm.SACRealm.SecurityService.PALO_ALTO_PRISMA_ACCESS, + state=sse_realm.SACRealm.State.PENDING_PARTNER_ATTACHMENT, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.SACRealm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_sac_realm(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, sse_realm.SACRealm) + assert response.name == "name_value" + assert ( + response.security_service + == sse_realm.SACRealm.SecurityService.PALO_ALTO_PRISMA_ACCESS + ) + assert response.state == sse_realm.SACRealm.State.PENDING_PARTNER_ATTACHMENT + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_sac_realm_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_get_sac_realm" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_get_sac_realm_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_get_sac_realm" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.GetSACRealmRequest.pb(sse_realm.GetSACRealmRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = sse_realm.SACRealm.to_json(sse_realm.SACRealm()) + req.return_value.content = return_value + + request = sse_realm.GetSACRealmRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = sse_realm.SACRealm() + post_with_metadata.return_value = sse_realm.SACRealm(), metadata + + client.get_sac_realm( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_sac_realm_rest_bad_request( + request_type=sse_realm.CreateSACRealmRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_sac_realm(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.CreateSACRealmRequest, + dict, + ], +) +def test_create_sac_realm_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["sac_realm"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "security_service": 1, + "pairing_key": {"key": "key_value", "expire_time": {}}, + "state": 7, + "symantec_options": { + "available_symantec_sites": [ + "available_symantec_sites_value1", + "available_symantec_sites_value2", + ], + "secret_path": "secret_path_value", + "symantec_connection_state": 1, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = sse_realm.CreateSACRealmRequest.meta.fields["sac_realm"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
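+        # proto-plus message classes expose their fields via `.meta.fields`,
+        # while raw protobuf classes expose them via `.DESCRIPTOR.fields`;
+        # the hasattr check below distinguishes the two cases.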
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["sac_realm"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["sac_realm"][field])): + del request_init["sac_realm"][field][i][subfield] + else: + del request_init["sac_realm"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_sac_realm(request) + + # Establish that the response is the type that we expect. 
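+    # create_sac_realm returns a long-running operation, so this test only
+    # round-trips the Operation through MessageToJson here rather than
+    # asserting on concrete result fields.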
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_sac_realm_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_create_sac_realm" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_create_sac_realm_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_create_sac_realm" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.CreateSACRealmRequest.pb( + sse_realm.CreateSACRealmRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = sse_realm.CreateSACRealmRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_sac_realm( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_sac_realm_rest_bad_request( + request_type=sse_realm.DeleteSACRealmRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sacRealms/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_sac_realm(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.DeleteSACRealmRequest, + dict, + ], +) +def test_delete_sac_realm_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sacRealms/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_sac_realm(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_sac_realm_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_delete_sac_realm" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_delete_sac_realm_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_delete_sac_realm" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.DeleteSACRealmRequest.pb( + sse_realm.DeleteSACRealmRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = sse_realm.DeleteSACRealmRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_sac_realm( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_sac_attachments_rest_bad_request( + request_type=sse_realm.ListSACAttachmentsRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_sac_attachments(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.ListSACAttachmentsRequest, + dict, + ], +) +def test_list_sac_attachments_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_realm.ListSACAttachmentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.ListSACAttachmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_sac_attachments(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSACAttachmentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_sac_attachments_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_list_sac_attachments" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, + "post_list_sac_attachments_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_list_sac_attachments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.ListSACAttachmentsRequest.pb( + sse_realm.ListSACAttachmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = sse_realm.ListSACAttachmentsResponse.to_json( + sse_realm.ListSACAttachmentsResponse() + ) + req.return_value.content = return_value + + request = sse_realm.ListSACAttachmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = sse_realm.ListSACAttachmentsResponse() + post_with_metadata.return_value = ( + sse_realm.ListSACAttachmentsResponse(), + metadata, + ) + + client.list_sac_attachments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_sac_attachment_rest_bad_request( + request_type=sse_realm.GetSACAttachmentRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sacAttachments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_sac_attachment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.GetSACAttachmentRequest, + dict, + ], +) +def test_get_sac_attachment_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sacAttachments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_realm.SACAttachment( + name="name_value", + sac_realm="sac_realm_value", + ncc_gateway="ncc_gateway_value", + country="country_value", + time_zone="time_zone_value", + state=sse_realm.SACAttachment.State.PENDING_PARTNER_ATTACHMENT, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.SACAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_sac_attachment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, sse_realm.SACAttachment) + assert response.name == "name_value" + assert response.sac_realm == "sac_realm_value" + assert response.ncc_gateway == "ncc_gateway_value" + assert response.country == "country_value" + assert response.time_zone == "time_zone_value" + assert response.state == sse_realm.SACAttachment.State.PENDING_PARTNER_ATTACHMENT + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_sac_attachment_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_get_sac_attachment" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, + "post_get_sac_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_get_sac_attachment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.GetSACAttachmentRequest.pb( + sse_realm.GetSACAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = sse_realm.SACAttachment.to_json(sse_realm.SACAttachment()) + req.return_value.content = return_value + + request = sse_realm.GetSACAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = sse_realm.SACAttachment() + post_with_metadata.return_value = sse_realm.SACAttachment(), metadata + + client.get_sac_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_sac_attachment_rest_bad_request( + request_type=sse_realm.CreateSACAttachmentRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_sac_attachment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.CreateSACAttachmentRequest, + dict, + ], +) +def test_create_sac_attachment_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["sac_attachment"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "sac_realm": "sac_realm_value", + "ncc_gateway": "ncc_gateway_value", + "country": "country_value", + "time_zone": "time_zone_value", + "symantec_options": { + "symantec_site": "symantec_site_value", + "symantec_location_name": "symantec_location_name_value", + }, + "state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = sse_realm.CreateSACAttachmentRequest.meta.fields["sac_attachment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["sac_attachment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["sac_attachment"][field])): + del request_init["sac_attachment"][field][i][subfield] + else: + del request_init["sac_attachment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_sac_attachment(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_sac_attachment_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_create_sac_attachment" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, + "post_create_sac_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_create_sac_attachment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.CreateSACAttachmentRequest.pb( + sse_realm.CreateSACAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = sse_realm.CreateSACAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_sac_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_sac_attachment_rest_bad_request( + request_type=sse_realm.DeleteSACAttachmentRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sacAttachments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_sac_attachment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.DeleteSACAttachmentRequest, + dict, + ], +) +def test_delete_sac_attachment_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sacAttachments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_sac_attachment(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_sac_attachment_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_delete_sac_attachment" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, + "post_delete_sac_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_delete_sac_attachment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.DeleteSACAttachmentRequest.pb( + sse_realm.DeleteSACAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = sse_realm.DeleteSACAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = 
operations_pb2.Operation(), metadata + + client.delete_sac_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_partner_sse_realms_rest_bad_request( + request_type=sse_realm.ListPartnerSSERealmsRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_partner_sse_realms(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.ListPartnerSSERealmsRequest, + dict, + ], +) +def test_list_partner_sse_realms_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_realm.ListPartnerSSERealmsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.ListPartnerSSERealmsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_partner_sse_realms(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPartnerSSERealmsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_partner_sse_realms_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_list_partner_sse_realms" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, + "post_list_partner_sse_realms_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_list_partner_sse_realms" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.ListPartnerSSERealmsRequest.pb( + sse_realm.ListPartnerSSERealmsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = sse_realm.ListPartnerSSERealmsResponse.to_json( + sse_realm.ListPartnerSSERealmsResponse() + ) + req.return_value.content = return_value + + request = sse_realm.ListPartnerSSERealmsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = sse_realm.ListPartnerSSERealmsResponse() + post_with_metadata.return_value = ( + sse_realm.ListPartnerSSERealmsResponse(), + metadata, + ) + + client.list_partner_sse_realms( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_partner_sse_realm_rest_bad_request( + request_type=sse_realm.GetPartnerSSERealmRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/partnerSSERealms/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_partner_sse_realm(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.GetPartnerSSERealmRequest, + dict, + ], +) +def test_get_partner_sse_realm_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/partnerSSERealms/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = sse_realm.PartnerSSERealm( + name="name_value", + pairing_key="pairing_key_value", + partner_vpc="partner_vpc_value", + sse_vpc="sse_vpc_value", + sse_project="sse_project_value", + state=sse_realm.PartnerSSERealm.State.CUSTOMER_ATTACHED, + partner_network="partner_network_value", + sse_network="sse_network_value", + sse_project_number=1929, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = sse_realm.PartnerSSERealm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_partner_sse_realm(request) + + # Establish that the response is the type that we expect. 
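+    # (Each assertion below mirrors a field set on the fake PartnerSSERealm
+    #  payload designated above.)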
+ assert isinstance(response, sse_realm.PartnerSSERealm) + assert response.name == "name_value" + assert response.pairing_key == "pairing_key_value" + assert response.partner_vpc == "partner_vpc_value" + assert response.sse_vpc == "sse_vpc_value" + assert response.sse_project == "sse_project_value" + assert response.state == sse_realm.PartnerSSERealm.State.CUSTOMER_ATTACHED + assert response.partner_network == "partner_network_value" + assert response.sse_network == "sse_network_value" + assert response.sse_project_number == 1929 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_partner_sse_realm_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_get_partner_sse_realm" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, + "post_get_partner_sse_realm_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_get_partner_sse_realm" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.GetPartnerSSERealmRequest.pb( + sse_realm.GetPartnerSSERealmRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = sse_realm.PartnerSSERealm.to_json(sse_realm.PartnerSSERealm()) + req.return_value.content = return_value + + request = sse_realm.GetPartnerSSERealmRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = sse_realm.PartnerSSERealm() + post_with_metadata.return_value = sse_realm.PartnerSSERealm(), metadata + + client.get_partner_sse_realm( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_partner_sse_realm_rest_bad_request( + request_type=sse_realm.CreatePartnerSSERealmRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_partner_sse_realm(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.CreatePartnerSSERealmRequest, + dict, + ], +) +def test_create_partner_sse_realm_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["partner_sse_realm"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "pairing_key": "pairing_key_value", + "partner_vpc": "partner_vpc_value", + "sse_vpc": "sse_vpc_value", + "sse_project": "sse_project_value", + "state": 1, + "partner_network": "partner_network_value", + "sse_network": "sse_network_value", + "pan_options": { + "serial_number": "serial_number_value", + "tenant_id": "tenant_id_value", + }, + "sse_project_number": 1929, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = sse_realm.CreatePartnerSSERealmRequest.meta.fields["partner_sse_realm"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
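+        # (proto-plus message classes describe their schema via ``meta.fields``,
+        #  while vanilla protobuf classes expose ``DESCRIPTOR.fields``; the
+        #  ``hasattr(..., "DESCRIPTOR")`` check below tells the two apart.)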
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["partner_sse_realm"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["partner_sse_realm"][field])): + del request_init["partner_sse_realm"][field][i][subfield] + else: + del request_init["partner_sse_realm"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_partner_sse_realm(request) + + # Establish that the response is the type that we expect. 
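+    # (create_partner_sse_realm returns a long-running operation, so no
+    #  resource-type assertion follows here; the Operation payload is only
+    #  re-serialized to JSON.)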
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_partner_sse_realm_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_create_partner_sse_realm" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, + "post_create_partner_sse_realm_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_create_partner_sse_realm" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.CreatePartnerSSERealmRequest.pb( + sse_realm.CreatePartnerSSERealmRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = sse_realm.CreatePartnerSSERealmRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_partner_sse_realm( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_partner_sse_realm_rest_bad_request( + request_type=sse_realm.DeletePartnerSSERealmRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/partnerSSERealms/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_partner_sse_realm(request) + + +@pytest.mark.parametrize( + "request_type", + [ + sse_realm.DeletePartnerSSERealmRequest, + dict, + ], +) +def test_delete_partner_sse_realm_rest_call_success(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/partnerSSERealms/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_partner_sse_realm(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_partner_sse_realm_rest_interceptors(null_interceptor): + transport = transports.SSERealmServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SSERealmServiceRestInterceptor(), + ) + client = SSERealmServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SSERealmServiceRestInterceptor, "post_delete_partner_sse_realm" + ) as post, mock.patch.object( + transports.SSERealmServiceRestInterceptor, + "post_delete_partner_sse_realm_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SSERealmServiceRestInterceptor, "pre_delete_partner_sse_realm" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = sse_realm.DeletePartnerSSERealmRequest.pb( + sse_realm.DeletePartnerSSERealmRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = sse_realm.DeletePartnerSSERealmRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_partner_sse_realm( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sac_realms_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_sac_realms), "__call__") as call: + client.list_sac_realms(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.ListSACRealmsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_sac_realm_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_sac_realm), "__call__") as call: + client.get_sac_realm(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.GetSACRealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_sac_realm_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_sac_realm), "__call__") as call: + client.create_sac_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.CreateSACRealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_sac_realm_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_sac_realm), "__call__") as call: + client.delete_sac_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.DeleteSACRealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sac_attachments_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sac_attachments), "__call__" + ) as call: + client.list_sac_attachments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.ListSACAttachmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_sac_attachment_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_sac_attachment), "__call__" + ) as call: + client.get_sac_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.GetSACAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_sac_attachment_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_sac_attachment), "__call__" + ) as call: + client.create_sac_attachment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.CreateSACAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_sac_attachment_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_sac_attachment), "__call__" + ) as call: + client.delete_sac_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.DeleteSACAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_partner_sse_realms_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_partner_sse_realms), "__call__" + ) as call: + client.list_partner_sse_realms(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.ListPartnerSSERealmsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_partner_sse_realm_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_sse_realm), "__call__" + ) as call: + client.get_partner_sse_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.GetPartnerSSERealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_partner_sse_realm_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_partner_sse_realm), "__call__" + ) as call: + client.create_partner_sse_realm(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.CreatePartnerSSERealmRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_partner_sse_realm_empty_call_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_partner_sse_realm), "__call__" + ) as call: + client.delete_partner_sse_realm(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = sse_realm.DeletePartnerSSERealmRequest() + + assert args[0] == request_msg + + +def test_sse_realm_service_rest_lro_client(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SSERealmServiceGrpcTransport, + ) + + +def test_sse_realm_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SSERealmServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_sse_realm_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.network_security_v1alpha1.services.sse_realm_service.transports.SSERealmServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SSERealmServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_sac_realms", + "get_sac_realm", + "create_sac_realm", + "delete_sac_realm", + "list_sac_attachments", + "get_sac_attachment", + "create_sac_attachment", + "delete_sac_attachment", + "list_partner_sse_realms", + "get_partner_sse_realm", + "create_partner_sse_realm", + "delete_partner_sse_realm", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_sse_realm_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.network_security_v1alpha1.services.sse_realm_service.transports.SSERealmServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SSERealmServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + 
quota_project_id="octopus", + ) + + +def test_sse_realm_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.network_security_v1alpha1.services.sse_realm_service.transports.SSERealmServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SSERealmServiceTransport() + adc.assert_called_once() + + +def test_sse_realm_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SSERealmServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SSERealmServiceGrpcTransport, + transports.SSERealmServiceGrpcAsyncIOTransport, + ], +) +def test_sse_realm_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SSERealmServiceGrpcTransport, + transports.SSERealmServiceGrpcAsyncIOTransport, + transports.SSERealmServiceRestTransport, + ], +) +def test_sse_realm_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SSERealmServiceGrpcTransport, grpc_helpers), + (transports.SSERealmServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_sse_realm_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
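+    # (The assertion below also pins the default endpoint, scopes and gRPC
+    #  channel options that create_channel is expected to receive.)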
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SSERealmServiceGrpcTransport, + transports.SSERealmServiceGrpcAsyncIOTransport, + ], +) +def test_sse_realm_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_sse_realm_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SSERealmServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_sse_realm_service_host_no_port(transport_name): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_sse_realm_service_host_with_port(transport_name): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com:8000" + ), + 
transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_sse_realm_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SSERealmServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SSERealmServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_sac_realms._session + session2 = client2.transport.list_sac_realms._session + assert session1 != session2 + session1 = client1.transport.get_sac_realm._session + session2 = client2.transport.get_sac_realm._session + assert session1 != session2 + session1 = client1.transport.create_sac_realm._session + session2 = client2.transport.create_sac_realm._session + assert session1 != session2 + session1 = client1.transport.delete_sac_realm._session + session2 = client2.transport.delete_sac_realm._session + assert session1 != session2 + session1 = client1.transport.list_sac_attachments._session + session2 = client2.transport.list_sac_attachments._session + assert session1 != session2 + session1 = client1.transport.get_sac_attachment._session + session2 = client2.transport.get_sac_attachment._session + assert session1 != session2 + session1 = client1.transport.create_sac_attachment._session + session2 = client2.transport.create_sac_attachment._session + assert session1 != session2 + session1 = client1.transport.delete_sac_attachment._session + session2 = client2.transport.delete_sac_attachment._session + assert session1 != session2 + session1 = client1.transport.list_partner_sse_realms._session + session2 = client2.transport.list_partner_sse_realms._session + assert session1 != session2 + session1 = client1.transport.get_partner_sse_realm._session + session2 = client2.transport.get_partner_sse_realm._session + assert session1 != session2 + session1 = client1.transport.create_partner_sse_realm._session + session2 = client2.transport.create_partner_sse_realm._session + assert session1 != session2 + session1 = client1.transport.delete_partner_sse_realm._session + session2 = client2.transport.delete_partner_sse_realm._session + assert session1 != session2 + + +def test_sse_realm_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SSERealmServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_sse_realm_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SSERealmServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SSERealmServiceGrpcTransport, + transports.SSERealmServiceGrpcAsyncIOTransport, + ], +) +def test_sse_realm_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.SSERealmServiceGrpcTransport, + transports.SSERealmServiceGrpcAsyncIOTransport, + ], +) +def test_sse_realm_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_sse_realm_service_grpc_lro_client(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_sse_realm_service_grpc_lro_async_client(): + client = SSERealmServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_partner_sse_realm_path(): + project = "squid" + location = "clam" + partner_sse_realm = "whelk" + expected = "projects/{project}/locations/{location}/partnerSSERealms/{partner_sse_realm}".format( + project=project, + location=location, + partner_sse_realm=partner_sse_realm, + ) + actual = SSERealmServiceClient.partner_sse_realm_path( + project, location, partner_sse_realm + ) + assert expected == actual + + +def test_parse_partner_sse_realm_path(): + expected = { + "project": "octopus", + "location": "oyster", + "partner_sse_realm": "nudibranch", + } + path = SSERealmServiceClient.partner_sse_realm_path(**expected) + + # Check that the path construction is reversible. + actual = SSERealmServiceClient.parse_partner_sse_realm_path(path) + assert expected == actual + + +def test_sac_attachment_path(): + project = "cuttlefish" + location = "mussel" + sac_attachment = "winkle" + expected = "projects/{project}/locations/{location}/sacAttachments/{sac_attachment}".format( + project=project, + location=location, + sac_attachment=sac_attachment, + ) + actual = SSERealmServiceClient.sac_attachment_path( + project, location, sac_attachment + ) + assert expected == actual + + +def test_parse_sac_attachment_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "sac_attachment": "abalone", + } + path = SSERealmServiceClient.sac_attachment_path(**expected) + + # Check that the path construction is reversible. + actual = SSERealmServiceClient.parse_sac_attachment_path(path) + assert expected == actual + + +def test_sac_realm_path(): + project = "squid" + location = "clam" + sac_realm = "whelk" + expected = "projects/{project}/locations/{location}/sacRealms/{sac_realm}".format( + project=project, + location=location, + sac_realm=sac_realm, + ) + actual = SSERealmServiceClient.sac_realm_path(project, location, sac_realm) + assert expected == actual + + +def test_parse_sac_realm_path(): + expected = { + "project": "octopus", + "location": "oyster", + "sac_realm": "nudibranch", + } + path = SSERealmServiceClient.sac_realm_path(**expected) + + # Check that the path construction is reversible. + actual = SSERealmServiceClient.parse_sac_realm_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SSERealmServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = SSERealmServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SSERealmServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SSERealmServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = SSERealmServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SSERealmServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SSERealmServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = SSERealmServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SSERealmServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = SSERealmServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = SSERealmServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SSERealmServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SSERealmServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = SSERealmServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SSERealmServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SSERealmServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SSERealmServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SSERealmServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = SSERealmServiceClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = SSERealmServiceAsyncClient(credentials=async_anonymous_credentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = SSERealmServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SSERealmServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SSERealmServiceClient, transports.SSERealmServiceGrpcTransport), + (SSERealmServiceAsyncClient, transports.SSERealmServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + )